[llvm] c7c561e - AMDGPU: Enable ExpandLargeFpConvert for > 64-bit types

Matt Arsenault via llvm-commits llvm-commits at lists.llvm.org
Fri Mar 15 03:39:08 PDT 2024


Author: Matt Arsenault
Date: 2024-03-15T16:08:39+05:30
New Revision: c7c561ef98ad783d257dab3940dd2378ef8760bf

URL: https://github.com/llvm/llvm-project/commit/c7c561ef98ad783d257dab3940dd2378ef8760bf
DIFF: https://github.com/llvm/llvm-project/commit/c7c561ef98ad783d257dab3940dd2378ef8760bf.diff

LOG: AMDGPU: Enable ExpandLargeFpConvert for > 64-bit types

Fixes casts between double/float/half and i128. The pass seems to be
broken for bfloat, though. I also believe we could have a better
implementation that attempts to make use of the native 32-bit conversion
instructions, as the 64-bit expansion does.
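
For reference, the simplest case this enables is an fp-to-int conversion
wider than 64 bits at the IR level, e.g. (taken from the added tests):

  define i128 @fptosi_f64_to_i128(double %x) {
    %cvt = fptosi double %x to i128
    ret i128 %cvt
  }

With setMaxLargeFPConvertBitWidthSupported(64), conversions involving
integer types wider than 64 bits are expanded by the ExpandLargeFpConvert
pass into the compare-and-shift integer sequences checked below.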

Added: 
    llvm/test/CodeGen/AMDGPU/fptoi.i128.ll
    llvm/test/CodeGen/AMDGPU/itofp.i128.ll

Modified: 
    llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp b/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
index 8a71550e5532cd..bee43b6c18c880 100644
--- a/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
+++ b/llvm/lib/Target/AMDGPU/AMDGPUISelLowering.cpp
@@ -582,6 +582,7 @@ AMDGPUTargetLowering::AMDGPUTargetLowering(const TargetMachine &TM,
 
   setMaxAtomicSizeInBitsSupported(64);
   setMaxDivRemBitWidthSupported(64);
+  setMaxLargeFPConvertBitWidthSupported(64);
 }
 
 bool AMDGPUTargetLowering::mayIgnoreSignedZero(SDValue Op) const {

diff --git a/llvm/test/CodeGen/AMDGPU/fptoi.i128.ll b/llvm/test/CodeGen/AMDGPU/fptoi.i128.ll
new file mode 100644
index 00000000000000..a229889fe63906
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/fptoi.i128.ll
@@ -0,0 +1,1510 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
+; RUN: llc -global-isel=0 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,SDAG %s
+; RUN: llc -global-isel=1 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,GISEL %s
+
+define i128 @fptosi_f64_to_i128(double %x) {
+; SDAG-LABEL: fptosi_f64_to_i128:
+; SDAG:       ; %bb.0: ; %fp-to-i-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_mov_b32_e32 v5, v1
+; SDAG-NEXT:    v_bfe_u32 v6, v5, 20, 11
+; SDAG-NEXT:    v_mov_b32_e32 v7, 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0x3fe
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[4:5], v[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v1, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[8:9], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB0_10
+; SDAG-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, -1, v7, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v2, vcc, -1, v7, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, -1, v7, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0xffffff7f
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[0:1]
+; SDAG-NEXT:    v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_cndmask_b32_e64 v1, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; SDAG-NEXT:    v_and_b32_e32 v0, 1, v0
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 1, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[10:11], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB0_7
+; SDAG-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v9, vcc, -1, v0
+; SDAG-NEXT:    v_addc_co_u32_e64 v10, s[6:7], 0, -1, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0x432
+; SDAG-NEXT:    v_and_b32_e32 v0, 0xfffff, v5
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v8, -1, 0, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v11, -1, 1, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v5, 0x100000, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB0_4
+; SDAG-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; SDAG-NEXT:    v_sub_u32_e32 v0, 0x473, v6
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xfffffb8d, v6
+; SDAG-NEXT:    v_add_u32_e32 v7, 0xfffffbcd, v6
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v0, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[2:3], v2, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v7
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; SDAG-NEXT:    v_cmp_ne_u32_e64 s[6:7], 0, v7
+; SDAG-NEXT:    v_cndmask_b32_e64 v6, 0, v1, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v7, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, v2, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, 0, v1, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v12, v11, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    v_mul_lo_u32 v13, v8, v2
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v11, v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, v1
+; SDAG-NEXT:    v_mul_lo_u32 v6, v11, v6
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v7, v11, v[2:3]
+; SDAG-NEXT:    v_mul_lo_u32 v10, v10, v12
+; SDAG-NEXT:    v_add3_u32 v5, v5, v6, v13
+; SDAG-NEXT:    v_mov_b32_e32 v6, v2
+; SDAG-NEXT:    v_mov_b32_e32 v2, v3
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v12, v8, v[1:2]
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v9, v12, v[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v5, vcc, v6, v2
+; SDAG-NEXT:    v_addc_co_u32_e64 v6, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mul_lo_u32 v9, v9, v7
+; SDAG-NEXT:    v_mad_u64_u32 v[5:6], s[6:7], v7, v8, v[5:6]
+; SDAG-NEXT:    ; implicit-def: $vgpr11
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    v_add3_u32 v4, v10, v4, v9
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v5, v3
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, v6, v4, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr6_vgpr7
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    ; implicit-def: $vgpr10
+; SDAG-NEXT:  .LBB0_4: ; %Flow
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB0_6
+; SDAG-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x433, v6
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v2, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, 0, v1, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v6, v0, v4, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v5, v1, v5, s[6:7]
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v6, v11, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v5, v11, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v7, v4
+; SDAG-NEXT:    v_mov_b32_e32 v4, v2
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v6, v8, v[3:4]
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v7, v2
+; SDAG-NEXT:    v_addc_co_u32_e64 v3, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v5, v8, v[2:3]
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v9, v6, v[2:3]
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v10, v6, v[3:4]
+; SDAG-NEXT:    v_mad_i32_i24 v3, v9, v5, v3
+; SDAG-NEXT:  .LBB0_6: ; %Flow1
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:  .LBB0_7: ; %Flow2
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[10:11]
+; SDAG-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; SDAG-NEXT:    v_bfrev_b32_e32 v0, 1
+; SDAG-NEXT:    v_bfrev_b32_e32 v1, -2
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, -1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, v0, v1, s[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v2
+; SDAG-NEXT:    v_mov_b32_e32 v1, v2
+; SDAG-NEXT:  ; %bb.9: ; %Flow3
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB0_10: ; %fp-to-i-cleanup
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: fptosi_f64_to_i128:
+; GISEL:       ; %bb.0: ; %fp-to-i-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v5, v1
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    v_lshrrev_b32_e32 v0, 20, v5
+; GISEL-NEXT:    v_and_b32_e32 v6, 0x7ff, v0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x3ff
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_mov_b32_e32 v7, 0
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    s_mov_b64 s[6:7], s[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v0, s4
+; GISEL-NEXT:    v_mov_b32_e32 v1, s5
+; GISEL-NEXT:    v_mov_b32_e32 v2, s6
+; GISEL-NEXT:    v_mov_b32_e32 v3, s7
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB0_10
+; GISEL-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
+; GISEL-NEXT:    v_mov_b32_e32 v2, 0xffffff80
+; GISEL-NEXT:    v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
+; GISEL-NEXT:    v_mov_b32_e32 v3, 0
+; GISEL-NEXT:    v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
+; GISEL-NEXT:    v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[8:9]
+; GISEL-NEXT:    v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, 1, v0, vcc
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[14:15], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_7
+; GISEL-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; GISEL-NEXT:    s_xor_b64 s[6:7], s[4:5], -1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, -1, s[6:7]
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v2, 1, v0
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, 1, s[6:7]
+; GISEL-NEXT:    v_lshlrev_b16_e32 v3, 2, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v8, 3, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v9, 4, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v10, 5, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v11, 6, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v12, 7, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v13, 8, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v14, 9, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v15, 10, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v16, 11, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v17, 12, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v18, 13, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v19, 14, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v20, 15, v0
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v2
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v3
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v3
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v8
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v8
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v9
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v9
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v10
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v10
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v11
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v11
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v12
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v12
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v13
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v13
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v14
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v14
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v15
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v15
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v16
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v16
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v17
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v17
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v18
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v18
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v19
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v19
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v20
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v20
+; GISEL-NEXT:    v_and_b32_e32 v0, 0xffff, v0
+; GISEL-NEXT:    v_and_b32_e32 v1, 0xffff, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 16, v0
+; GISEL-NEXT:    v_lshl_or_b32 v10, v0, 16, v0
+; GISEL-NEXT:    v_or3_b32 v8, v1, v2, 1
+; GISEL-NEXT:    v_or3_b32 v9, v0, v2, 0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x433
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0xfffff, v5
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    v_or_b32_e32 v5, 0x100000, v2
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[16:17], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_4
+; GISEL-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; GISEL-NEXT:    v_add_co_u32_e32 v6, vcc, 0xfffffbcd, v6
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v11, v10, 0
+; GISEL-NEXT:    v_subrev_u32_e32 v7, 64, v6
+; GISEL-NEXT:    v_sub_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, v[4:5]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v7, v[4:5]
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v6
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[0:1]
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v4, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v2, 0, s[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v8, v[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v11, v8, 0
+; GISEL-NEXT:    v_mov_b32_e32 v2, v6
+; GISEL-NEXT:    v_mul_lo_u32 v6, v11, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[8:9], v11, v9, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v12, v10
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v5, v3, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[10:11], v12, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
+; GISEL-NEXT:    v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v9, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v3, v3, 0, s[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr10
+; GISEL-NEXT:    ; implicit-def: $vgpr9
+; GISEL-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    ; implicit-def: $vgpr8
+; GISEL-NEXT:  .LBB0_4: ; %Flow
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[16:17]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_6
+; GISEL-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; GISEL-NEXT:    v_sub_co_u32_e32 v6, vcc, 0x433, v6
+; GISEL-NEXT:    v_subrev_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, 0
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v0, v4, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v4, v10, 0
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, v1, v5, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v4, v8, 0
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v5, v9, v[2:3]
+; GISEL-NEXT:    v_mul_lo_u32 v6, v5, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], vcc, v4, v9, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v4, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v5, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v3, s[6:7], v3, v4, s[6:7]
+; GISEL-NEXT:    v_addc_co_u32_e32 v3, vcc, v3, v6, vcc
+; GISEL-NEXT:  .LBB0_6: ; %Flow1
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:  .LBB0_7: ; %Flow2
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[14:15]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_9
+; GISEL-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v1, 1, v1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 1, v1
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 2, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 3, v1
+; GISEL-NEXT:    v_or_b32_e32 v2, v1, v2
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 4, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 5, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 6, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 7, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 8, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 9, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 10, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 11, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 12, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v14, 13, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v14
+; GISEL-NEXT:    v_lshlrev_b32_e32 v15, 14, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v16, 15, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v13, v14
+; GISEL-NEXT:    v_or3_b32 v0, v0, v15, v16
+; GISEL-NEXT:    v_lshlrev_b32_e32 v17, 16, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v18, 17, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v15, v16
+; GISEL-NEXT:    v_or3_b32 v0, v0, v17, v18
+; GISEL-NEXT:    v_lshlrev_b32_e32 v19, 18, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v20, 19, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v17, v18
+; GISEL-NEXT:    v_or3_b32 v0, v0, v19, v20
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 20, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 21, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v19, v20
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 22, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 23, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 24, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 25, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 26, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 27, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 28, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 29, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 30, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v1, 31, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v1
+; GISEL-NEXT:    v_or3_b32 v1, v2, v13, v1
+; GISEL-NEXT:    v_add_co_u32_e32 v3, vcc, 0x80000000, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v1
+; GISEL-NEXT:  .LBB0_9: ; %Flow3
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB0_10: ; %fp-to-i-cleanup
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptosi double %x to i128
+  ret i128 %cvt
+}
+
+define i128 @fptoui_f64_to_i128(double %x) {
+; SDAG-LABEL: fptoui_f64_to_i128:
+; SDAG:       ; %bb.0: ; %fp-to-i-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_mov_b32_e32 v5, v1
+; SDAG-NEXT:    v_bfe_u32 v6, v5, 20, 11
+; SDAG-NEXT:    v_mov_b32_e32 v7, 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0x3fe
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[4:5], v[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v1, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[8:9], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB1_10
+; SDAG-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, -1, v7, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v2, vcc, -1, v7, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, -1, v7, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0xffffff7f
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[0:1]
+; SDAG-NEXT:    v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_cndmask_b32_e64 v1, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; SDAG-NEXT:    v_and_b32_e32 v0, 1, v0
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 1, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[10:11], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB1_7
+; SDAG-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v9, vcc, -1, v0
+; SDAG-NEXT:    v_addc_co_u32_e64 v10, s[6:7], 0, -1, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0x432
+; SDAG-NEXT:    v_and_b32_e32 v0, 0xfffff, v5
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v8, -1, 0, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v11, -1, 1, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v5, 0x100000, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB1_4
+; SDAG-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; SDAG-NEXT:    v_sub_u32_e32 v0, 0x473, v6
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xfffffb8d, v6
+; SDAG-NEXT:    v_add_u32_e32 v7, 0xfffffbcd, v6
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v0, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[2:3], v2, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v7
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; SDAG-NEXT:    v_cmp_ne_u32_e64 s[6:7], 0, v7
+; SDAG-NEXT:    v_cndmask_b32_e64 v6, 0, v1, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v7, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, v2, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, 0, v1, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v12, v11, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    v_mul_lo_u32 v13, v8, v2
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v11, v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, v1
+; SDAG-NEXT:    v_mul_lo_u32 v6, v11, v6
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v7, v11, v[2:3]
+; SDAG-NEXT:    v_mul_lo_u32 v10, v10, v12
+; SDAG-NEXT:    v_add3_u32 v5, v5, v6, v13
+; SDAG-NEXT:    v_mov_b32_e32 v6, v2
+; SDAG-NEXT:    v_mov_b32_e32 v2, v3
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v12, v8, v[1:2]
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v9, v12, v[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v5, vcc, v6, v2
+; SDAG-NEXT:    v_addc_co_u32_e64 v6, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mul_lo_u32 v9, v9, v7
+; SDAG-NEXT:    v_mad_u64_u32 v[5:6], s[6:7], v7, v8, v[5:6]
+; SDAG-NEXT:    ; implicit-def: $vgpr11
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    v_add3_u32 v4, v10, v4, v9
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v5, v3
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, v6, v4, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr6_vgpr7
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    ; implicit-def: $vgpr10
+; SDAG-NEXT:  .LBB1_4: ; %Flow
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB1_6
+; SDAG-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x433, v6
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v2, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, 0, v1, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v6, v0, v4, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v5, v1, v5, s[6:7]
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v6, v11, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v5, v11, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v7, v4
+; SDAG-NEXT:    v_mov_b32_e32 v4, v2
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v6, v8, v[3:4]
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v7, v2
+; SDAG-NEXT:    v_addc_co_u32_e64 v3, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v5, v8, v[2:3]
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v9, v6, v[2:3]
+; SDAG-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v10, v6, v[3:4]
+; SDAG-NEXT:    v_mad_i32_i24 v3, v9, v5, v3
+; SDAG-NEXT:  .LBB1_6: ; %Flow1
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:  .LBB1_7: ; %Flow2
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[10:11]
+; SDAG-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; SDAG-NEXT:    v_bfrev_b32_e32 v0, 1
+; SDAG-NEXT:    v_bfrev_b32_e32 v1, -2
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, -1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, v0, v1, s[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v2
+; SDAG-NEXT:    v_mov_b32_e32 v1, v2
+; SDAG-NEXT:  ; %bb.9: ; %Flow3
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB1_10: ; %fp-to-i-cleanup
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: fptoui_f64_to_i128:
+; GISEL:       ; %bb.0: ; %fp-to-i-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v5, v1
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    v_lshrrev_b32_e32 v0, 20, v5
+; GISEL-NEXT:    v_and_b32_e32 v6, 0x7ff, v0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x3ff
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_mov_b32_e32 v7, 0
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    s_mov_b64 s[6:7], s[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v0, s4
+; GISEL-NEXT:    v_mov_b32_e32 v1, s5
+; GISEL-NEXT:    v_mov_b32_e32 v2, s6
+; GISEL-NEXT:    v_mov_b32_e32 v3, s7
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB1_10
+; GISEL-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
+; GISEL-NEXT:    v_mov_b32_e32 v2, 0xffffff80
+; GISEL-NEXT:    v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
+; GISEL-NEXT:    v_mov_b32_e32 v3, 0
+; GISEL-NEXT:    v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
+; GISEL-NEXT:    v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[8:9]
+; GISEL-NEXT:    v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, 1, v0, vcc
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[14:15], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_7
+; GISEL-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; GISEL-NEXT:    s_xor_b64 s[6:7], s[4:5], -1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, -1, s[6:7]
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v2, 1, v0
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, 1, s[6:7]
+; GISEL-NEXT:    v_lshlrev_b16_e32 v3, 2, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v8, 3, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v9, 4, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v10, 5, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v11, 6, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v12, 7, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v13, 8, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v14, 9, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v15, 10, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v16, 11, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v17, 12, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v18, 13, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v19, 14, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v20, 15, v0
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v2
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v3
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v3
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v8
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v8
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v9
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v9
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v10
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v10
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v11
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v11
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v12
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v12
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v13
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v13
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v14
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v14
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v15
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v15
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v16
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v16
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v17
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v17
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v18
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v18
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v19
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v19
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v20
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v20
+; GISEL-NEXT:    v_and_b32_e32 v0, 0xffff, v0
+; GISEL-NEXT:    v_and_b32_e32 v1, 0xffff, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 16, v0
+; GISEL-NEXT:    v_lshl_or_b32 v10, v0, 16, v0
+; GISEL-NEXT:    v_or3_b32 v8, v1, v2, 1
+; GISEL-NEXT:    v_or3_b32 v9, v0, v2, 0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x433
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0xfffff, v5
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    v_or_b32_e32 v5, 0x100000, v2
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[16:17], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_4
+; GISEL-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; GISEL-NEXT:    v_add_co_u32_e32 v6, vcc, 0xfffffbcd, v6
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v11, v10, 0
+; GISEL-NEXT:    v_subrev_u32_e32 v7, 64, v6
+; GISEL-NEXT:    v_sub_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, v[4:5]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v7, v[4:5]
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v6
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[0:1]
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v4, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v2, 0, s[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v8, v[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v11, v8, 0
+; GISEL-NEXT:    v_mov_b32_e32 v2, v6
+; GISEL-NEXT:    v_mul_lo_u32 v6, v11, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[8:9], v11, v9, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v12, v10
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v5, v3, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[10:11], v12, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
+; GISEL-NEXT:    v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v9, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v3, v3, 0, s[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr10
+; GISEL-NEXT:    ; implicit-def: $vgpr9
+; GISEL-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    ; implicit-def: $vgpr8
+; GISEL-NEXT:  .LBB1_4: ; %Flow
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[16:17]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_6
+; GISEL-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; GISEL-NEXT:    v_sub_co_u32_e32 v6, vcc, 0x433, v6
+; GISEL-NEXT:    v_subrev_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, 0
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v0, v4, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v4, v10, 0
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, v1, v5, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v4, v8, 0
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v5, v9, v[2:3]
+; GISEL-NEXT:    v_mul_lo_u32 v6, v5, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], vcc, v4, v9, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v4, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v5, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v3, s[6:7], v3, v4, s[6:7]
+; GISEL-NEXT:    v_addc_co_u32_e32 v3, vcc, v3, v6, vcc
+; GISEL-NEXT:  .LBB1_6: ; %Flow1
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:  .LBB1_7: ; %Flow2
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[14:15]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_9
+; GISEL-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v1, 1, v1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 1, v1
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 2, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 3, v1
+; GISEL-NEXT:    v_or_b32_e32 v2, v1, v2
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 4, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 5, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 6, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 7, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 8, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 9, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 10, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 11, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 12, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v14, 13, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v14
+; GISEL-NEXT:    v_lshlrev_b32_e32 v15, 14, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v16, 15, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v13, v14
+; GISEL-NEXT:    v_or3_b32 v0, v0, v15, v16
+; GISEL-NEXT:    v_lshlrev_b32_e32 v17, 16, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v18, 17, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v15, v16
+; GISEL-NEXT:    v_or3_b32 v0, v0, v17, v18
+; GISEL-NEXT:    v_lshlrev_b32_e32 v19, 18, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v20, 19, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v17, v18
+; GISEL-NEXT:    v_or3_b32 v0, v0, v19, v20
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 20, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 21, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v19, v20
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 22, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 23, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 24, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 25, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 26, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 27, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 28, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 29, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 30, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v1, 31, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v1
+; GISEL-NEXT:    v_or3_b32 v1, v2, v13, v1
+; GISEL-NEXT:    v_add_co_u32_e32 v3, vcc, 0x80000000, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v1
+; GISEL-NEXT:  .LBB1_9: ; %Flow3
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB1_10: ; %fp-to-i-cleanup
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptoui double %x to i128
+  ret i128 %cvt
+}
+
+define i128 @fptosi_f32_to_i128(float %x) {
+; SDAG-LABEL: fptosi_f32_to_i128:
+; SDAG:       ; %bb.0: ; %fp-to-i-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    v_bfe_u32 v5, v4, 23, 8
+; SDAG-NEXT:    s_movk_i32 s4, 0x7e
+; SDAG-NEXT:    v_mov_b32_e32 v0, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v6, 0
+; SDAG-NEXT:    v_mov_b32_e32 v1, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    v_cmp_lt_u32_e32 vcc, s4, v5
+; SDAG-NEXT:    s_and_saveexec_b64 s[8:9], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB2_10
+; SDAG-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0xffffff7f
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[0:1]
+; SDAG-NEXT:    v_cmp_lt_i32_e64 s[4:5], -1, v4
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_cndmask_b32_e64 v1, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; SDAG-NEXT:    v_and_b32_e32 v0, 1, v0
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 1, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[10:11], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB2_7
+; SDAG-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v9, vcc, -1, v0
+; SDAG-NEXT:    v_addc_co_u32_e64 v11, s[6:7], 0, -1, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0x95
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x7fffff, v4
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[5:6]
+; SDAG-NEXT:    v_mov_b32_e32 v7, 0
+; SDAG-NEXT:    v_cndmask_b32_e64 v8, -1, 0, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v10, -1, 1, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v6, 0x800000, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB2_4
+; SDAG-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; SDAG-NEXT:    v_sub_u32_e32 v0, 0xd6, v5
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xffffff2a, v5
+; SDAG-NEXT:    v_add_u32_e32 v4, 0xffffff6a, v5
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v0, v[6:7]
+; SDAG-NEXT:    v_lshlrev_b64 v[2:3], v2, v[6:7]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v4
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; SDAG-NEXT:    v_cmp_ne_u32_e64 s[6:7], 0, v4
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, 0, v1, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v4, v[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, v2, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v13, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v13, v10, 0
+; SDAG-NEXT:    v_mul_lo_u32 v14, v8, v2
+; SDAG-NEXT:    v_mul_lo_u32 v15, v10, v3
+; SDAG-NEXT:    v_mov_b32_e32 v6, v1
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v12, v10, v[6:7]
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v10, v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v6, v5
+; SDAG-NEXT:    v_mov_b32_e32 v5, v7
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v13, v8, v[4:5]
+; SDAG-NEXT:    v_add3_u32 v3, v3, v15, v14
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v9, v13, v[2:3]
+; SDAG-NEXT:    v_add_co_u32_e32 v5, vcc, v6, v5
+; SDAG-NEXT:    v_addc_co_u32_e64 v6, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mul_lo_u32 v3, v9, v12
+; SDAG-NEXT:    v_mul_lo_u32 v7, v11, v13
+; SDAG-NEXT:    v_mad_u64_u32 v[5:6], s[6:7], v12, v8, v[5:6]
+; SDAG-NEXT:    ; implicit-def: $vgpr10
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    v_add3_u32 v3, v7, v2, v3
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v5, v1
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, v6, v3, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr5_vgpr6
+; SDAG-NEXT:    v_mov_b32_e32 v1, v4
+; SDAG-NEXT:    ; implicit-def: $vgpr6_vgpr7
+; SDAG-NEXT:  .LBB2_4: ; %Flow
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB2_6
+; SDAG-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x96, v5
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v2, v[6:7]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, 0, v0, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v3, v0, v6, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[12:13], v3, v10, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[12:13], v3, v8, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v1, v5
+; SDAG-NEXT:    v_mad_i64_i32 v[2:3], s[12:13], v9, v3, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v1, v4
+; SDAG-NEXT:  .LBB2_6: ; %Flow1
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB2_7: ; %Flow2
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[10:11]
+; SDAG-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; SDAG-NEXT:    v_bfrev_b32_e32 v0, 1
+; SDAG-NEXT:    v_bfrev_b32_e32 v1, -2
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, -1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, v0, v1, s[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v2
+; SDAG-NEXT:    v_mov_b32_e32 v1, v2
+; SDAG-NEXT:  ; %bb.9: ; %Flow3
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB2_10: ; %fp-to-i-cleanup
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: fptosi_f32_to_i128:
+; GISEL:       ; %bb.0: ; %fp-to-i-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    v_mov_b32_e32 v5, 0
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], 23, v[4:5]
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_bfe_u32 v6, v0, 0, 8
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x7f
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_mov_b32_e32 v7, v5
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    s_mov_b64 s[6:7], s[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v0, s4
+; GISEL-NEXT:    v_mov_b32_e32 v1, s5
+; GISEL-NEXT:    v_mov_b32_e32 v2, s6
+; GISEL-NEXT:    v_mov_b32_e32 v3, s7
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB2_10
+; GISEL-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 0xffffff01, v6
+; GISEL-NEXT:    v_mov_b32_e32 v2, 0xffffff80
+; GISEL-NEXT:    v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
+; GISEL-NEXT:    v_mov_b32_e32 v3, 0
+; GISEL-NEXT:    v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
+; GISEL-NEXT:    v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[8:9]
+; GISEL-NEXT:    v_cmp_lt_i32_e64 s[4:5], -1, v4
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, 1, v0, vcc
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[14:15], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_7
+; GISEL-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; GISEL-NEXT:    s_xor_b64 s[6:7], s[4:5], -1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, -1, s[6:7]
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v2, 1, v0
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, 1, s[6:7]
+; GISEL-NEXT:    v_lshlrev_b16_e32 v3, 2, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v5, 3, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v8, 4, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v9, 5, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v10, 6, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v11, 7, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v12, 8, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v13, 9, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v14, 10, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v15, 11, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v16, 12, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v17, 13, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v18, 14, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v19, 15, v0
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v2
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v3
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v3
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v5
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v5
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v8
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v8
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v9
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v9
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v10
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v10
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v11
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v11
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v12
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v12
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v13
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v13
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v14
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v14
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v15
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v15
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v16
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v16
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v17
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v17
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v18
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v18
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v19
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v19
+; GISEL-NEXT:    v_and_b32_e32 v0, 0xffff, v0
+; GISEL-NEXT:    v_and_b32_e32 v1, 0xffff, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 16, v0
+; GISEL-NEXT:    v_lshl_or_b32 v10, v0, 16, v0
+; GISEL-NEXT:    v_or3_b32 v9, v1, v2, 1
+; GISEL-NEXT:    v_or3_b32 v8, v0, v2, 0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x96
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0x7fffff, v4
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    v_or_b32_e32 v4, 0x800000, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, 0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[16:17], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_4
+; GISEL-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; GISEL-NEXT:    v_add_co_u32_e32 v6, vcc, 0xffffff6a, v6
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v11, v10, 0
+; GISEL-NEXT:    v_subrev_u32_e32 v7, 64, v6
+; GISEL-NEXT:    v_sub_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, v[4:5]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v7, v[4:5]
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v6
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[0:1]
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v4, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v2, 0, s[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v9, v[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v11, v9, 0
+; GISEL-NEXT:    v_mov_b32_e32 v2, v6
+; GISEL-NEXT:    v_mul_lo_u32 v6, v11, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[8:9], v11, v8, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v12, v10
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v5, v3, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[10:11], v12, v9, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
+; GISEL-NEXT:    v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v8, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v3, v3, 0, s[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr10
+; GISEL-NEXT:    ; implicit-def: $vgpr8
+; GISEL-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v3, v9, v[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    ; implicit-def: $vgpr9
+; GISEL-NEXT:  .LBB2_4: ; %Flow
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[16:17]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_6
+; GISEL-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; GISEL-NEXT:    v_sub_co_u32_e32 v3, vcc, 0x96, v6
+; GISEL-NEXT:    v_subrev_u32_e32 v2, 64, v3
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], v3, v[4:5]
+; GISEL-NEXT:    v_lshrrev_b64 v[1:2], v2, 0
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v3
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v3
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v0, v4, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v4, v9, 0
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[8:9], v4, v10, 0
+; GISEL-NEXT:    v_mul_lo_u32 v5, v4, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], vcc, v4, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e32 v3, vcc, v3, v5, vcc
+; GISEL-NEXT:  .LBB2_6: ; %Flow1
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB2_7: ; %Flow2
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[14:15]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_9
+; GISEL-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v1, 1, v1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 1, v1
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 2, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 3, v1
+; GISEL-NEXT:    v_or_b32_e32 v2, v1, v2
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 4, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 5, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 6, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 7, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 8, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 9, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 10, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 11, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 12, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v14, 13, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v14
+; GISEL-NEXT:    v_lshlrev_b32_e32 v15, 14, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v16, 15, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v13, v14
+; GISEL-NEXT:    v_or3_b32 v0, v0, v15, v16
+; GISEL-NEXT:    v_lshlrev_b32_e32 v17, 16, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v18, 17, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v15, v16
+; GISEL-NEXT:    v_or3_b32 v0, v0, v17, v18
+; GISEL-NEXT:    v_lshlrev_b32_e32 v19, 18, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v20, 19, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v17, v18
+; GISEL-NEXT:    v_or3_b32 v0, v0, v19, v20
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 20, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 21, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v19, v20
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 22, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 23, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 24, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 25, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 26, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 27, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 28, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 29, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 30, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v1, 31, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v1
+; GISEL-NEXT:    v_or3_b32 v1, v2, v13, v1
+; GISEL-NEXT:    v_add_co_u32_e32 v3, vcc, 0x80000000, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v1
+; GISEL-NEXT:  .LBB2_9: ; %Flow3
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB2_10: ; %fp-to-i-cleanup
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptosi float %x to i128
+  ret i128 %cvt
+}
+
+define i128 @fptoui_f32_to_i128(float %x) {
+; SDAG-LABEL: fptoui_f32_to_i128:
+; SDAG:       ; %bb.0: ; %fp-to-i-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    v_bfe_u32 v5, v4, 23, 8
+; SDAG-NEXT:    s_movk_i32 s4, 0x7e
+; SDAG-NEXT:    v_mov_b32_e32 v0, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v6, 0
+; SDAG-NEXT:    v_mov_b32_e32 v1, 0
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0
+; SDAG-NEXT:    v_cmp_lt_u32_e32 vcc, s4, v5
+; SDAG-NEXT:    s_and_saveexec_b64 s[8:9], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB3_10
+; SDAG-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0xffffff7f
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[0:1]
+; SDAG-NEXT:    v_cmp_lt_i32_e64 s[4:5], -1, v4
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_cndmask_b32_e64 v1, 0, 1, vcc
+; SDAG-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; SDAG-NEXT:    v_and_b32_e32 v0, 1, v0
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 1, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[10:11], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB3_7
+; SDAG-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; SDAG-NEXT:    v_add_co_u32_e32 v9, vcc, -1, v0
+; SDAG-NEXT:    v_addc_co_u32_e64 v11, s[6:7], 0, -1, vcc
+; SDAG-NEXT:    s_mov_b64 s[6:7], 0x95
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x7fffff, v4
+; SDAG-NEXT:    v_cmp_lt_u64_e32 vcc, s[6:7], v[5:6]
+; SDAG-NEXT:    v_mov_b32_e32 v7, 0
+; SDAG-NEXT:    v_cndmask_b32_e64 v8, -1, 0, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v10, -1, 1, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v6, 0x800000, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[6:7]
+; SDAG-NEXT:    s_cbranch_execz .LBB3_4
+; SDAG-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; SDAG-NEXT:    v_sub_u32_e32 v0, 0xd6, v5
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xffffff2a, v5
+; SDAG-NEXT:    v_add_u32_e32 v4, 0xffffff6a, v5
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v0, v[6:7]
+; SDAG-NEXT:    v_lshlrev_b64 v[2:3], v2, v[6:7]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v4
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, v3, v1, vcc
+; SDAG-NEXT:    v_cmp_ne_u32_e64 s[6:7], 0, v4
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, 0, v1, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v4, v[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, v2, s[6:7]
+; SDAG-NEXT:    v_cndmask_b32_e32 v13, 0, v0, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v13, v10, 0
+; SDAG-NEXT:    v_mul_lo_u32 v14, v8, v2
+; SDAG-NEXT:    v_mul_lo_u32 v15, v10, v3
+; SDAG-NEXT:    v_mov_b32_e32 v6, v1
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v12, v10, v[6:7]
+; SDAG-NEXT:    v_mad_u64_u32 v[2:3], s[6:7], v10, v2, 0
+; SDAG-NEXT:    v_mov_b32_e32 v6, v5
+; SDAG-NEXT:    v_mov_b32_e32 v5, v7
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[6:7], v13, v8, v[4:5]
+; SDAG-NEXT:    v_add3_u32 v3, v3, v15, v14
+; SDAG-NEXT:    v_mad_u64_u32 v[1:2], s[6:7], v9, v13, v[2:3]
+; SDAG-NEXT:    v_add_co_u32_e32 v5, vcc, v6, v5
+; SDAG-NEXT:    v_addc_co_u32_e64 v6, s[6:7], 0, 0, vcc
+; SDAG-NEXT:    v_mul_lo_u32 v3, v9, v12
+; SDAG-NEXT:    v_mul_lo_u32 v7, v11, v13
+; SDAG-NEXT:    v_mad_u64_u32 v[5:6], s[6:7], v12, v8, v[5:6]
+; SDAG-NEXT:    ; implicit-def: $vgpr10
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    v_add3_u32 v3, v7, v2, v3
+; SDAG-NEXT:    v_add_co_u32_e32 v2, vcc, v5, v1
+; SDAG-NEXT:    v_addc_co_u32_e32 v3, vcc, v6, v3, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr5_vgpr6
+; SDAG-NEXT:    v_mov_b32_e32 v1, v4
+; SDAG-NEXT:    ; implicit-def: $vgpr6_vgpr7
+; SDAG-NEXT:  .LBB3_4: ; %Flow
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB3_6
+; SDAG-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x96, v5
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v2, v[6:7]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, 0, v0, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v3, v0, v6, vcc
+; SDAG-NEXT:    v_mad_u64_u32 v[0:1], s[12:13], v3, v10, 0
+; SDAG-NEXT:    v_mov_b32_e32 v2, 0
+; SDAG-NEXT:    v_mad_u64_u32 v[4:5], s[12:13], v3, v8, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v1, v5
+; SDAG-NEXT:    v_mad_i64_i32 v[2:3], s[12:13], v9, v3, v[1:2]
+; SDAG-NEXT:    v_mov_b32_e32 v1, v4
+; SDAG-NEXT:  .LBB3_6: ; %Flow1
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB3_7: ; %Flow2
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[6:7], s[10:11]
+; SDAG-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; SDAG-NEXT:    v_bfrev_b32_e32 v0, 1
+; SDAG-NEXT:    v_bfrev_b32_e32 v1, -2
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, -1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v3, v0, v1, s[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v2
+; SDAG-NEXT:    v_mov_b32_e32 v1, v2
+; SDAG-NEXT:  ; %bb.9: ; %Flow3
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:  .LBB3_10: ; %fp-to-i-cleanup
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: fptoui_f32_to_i128:
+; GISEL:       ; %bb.0: ; %fp-to-i-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    v_mov_b32_e32 v5, 0
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], 23, v[4:5]
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_bfe_u32 v6, v0, 0, 8
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x7f
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_mov_b32_e32 v7, v5
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    s_mov_b64 s[6:7], s[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v0, s4
+; GISEL-NEXT:    v_mov_b32_e32 v1, s5
+; GISEL-NEXT:    v_mov_b32_e32 v2, s6
+; GISEL-NEXT:    v_mov_b32_e32 v3, s7
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB3_10
+; GISEL-NEXT:  ; %bb.1: ; %fp-to-i-if-end
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 0xffffff01, v6
+; GISEL-NEXT:    v_mov_b32_e32 v2, 0xffffff80
+; GISEL-NEXT:    v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
+; GISEL-NEXT:    v_mov_b32_e32 v3, 0
+; GISEL-NEXT:    v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
+; GISEL-NEXT:    v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, vcc
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[8:9]
+; GISEL-NEXT:    v_cmp_lt_i32_e64 s[4:5], -1, v4
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, 1, v0, vcc
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[14:15], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_7
+; GISEL-NEXT:  ; %bb.2: ; %fp-to-i-if-end9
+; GISEL-NEXT:    s_xor_b64 s[6:7], s[4:5], -1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, -1, s[6:7]
+; GISEL-NEXT:    v_and_b32_e32 v0, 1, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v2, 1, v0
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, 1, s[6:7]
+; GISEL-NEXT:    v_lshlrev_b16_e32 v3, 2, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v5, 3, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v8, 4, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v9, 5, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v10, 6, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v11, 7, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v12, 8, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v13, 9, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v14, 10, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v15, 11, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v16, 12, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v17, 13, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v18, 14, v0
+; GISEL-NEXT:    v_lshlrev_b16_e32 v19, 15, v0
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v2
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v3
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v3
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v5
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v5
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v8
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v8
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v9
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v9
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v10
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v10
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v11
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v11
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v12
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v12
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v13
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v13
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v14
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v14
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v15
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v15
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v16
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v16
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v17
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v17
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v18
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v18
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v19
+; GISEL-NEXT:    v_or_b32_e32 v1, v1, v19
+; GISEL-NEXT:    v_and_b32_e32 v0, 0xffff, v0
+; GISEL-NEXT:    v_and_b32_e32 v1, 0xffff, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 16, v0
+; GISEL-NEXT:    v_lshl_or_b32 v10, v0, 16, v0
+; GISEL-NEXT:    v_or3_b32 v9, v1, v2, 1
+; GISEL-NEXT:    v_or3_b32 v8, v0, v2, 0
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x96
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0x7fffff, v4
+; GISEL-NEXT:    v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
+; GISEL-NEXT:    v_or_b32_e32 v4, 0x800000, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, 0
+; GISEL-NEXT:    ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_xor_b64 s[16:17], exec, s[6:7]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_4
+; GISEL-NEXT:  ; %bb.3: ; %fp-to-i-if-else
+; GISEL-NEXT:    v_add_co_u32_e32 v6, vcc, 0xffffff6a, v6
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v6, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[6:7], v11, v10, 0
+; GISEL-NEXT:    v_subrev_u32_e32 v7, 64, v6
+; GISEL-NEXT:    v_sub_u32_e32 v2, 64, v6
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v2, v[4:5]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v7, v[4:5]
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[6:7], 0, v6
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[0:1]
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v4, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v2, 0, s[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v9, v[6:7]
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v11, v9, 0
+; GISEL-NEXT:    v_mov_b32_e32 v2, v6
+; GISEL-NEXT:    v_mul_lo_u32 v6, v11, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[8:9], v11, v8, v[1:2]
+; GISEL-NEXT:    v_mul_lo_u32 v4, v12, v10
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v5, v3, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], s[10:11], v12, v9, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
+; GISEL-NEXT:    v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
+; GISEL-NEXT:    v_mad_u64_u32 v[6:7], s[8:9], v13, v8, v[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v3, v3, 0, s[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr10
+; GISEL-NEXT:    ; implicit-def: $vgpr8
+; GISEL-NEXT:    v_mad_u64_u32 v[3:4], s[6:7], v3, v9, v[6:7]
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    ; implicit-def: $vgpr9
+; GISEL-NEXT:  .LBB3_4: ; %Flow
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[16:17]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_6
+; GISEL-NEXT:  ; %bb.5: ; %fp-to-i-if-then12
+; GISEL-NEXT:    v_sub_co_u32_e32 v3, vcc, 0x96, v6
+; GISEL-NEXT:    v_subrev_u32_e32 v2, 64, v3
+; GISEL-NEXT:    v_lshrrev_b64 v[0:1], v3, v[4:5]
+; GISEL-NEXT:    v_lshrrev_b64 v[1:2], v2, 0
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v3
+; GISEL-NEXT:    v_cndmask_b32_e32 v0, v1, v0, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v3
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v0, v4, vcc
+; GISEL-NEXT:    v_mad_u64_u32 v[0:1], s[8:9], v4, v9, 0
+; GISEL-NEXT:    v_mad_u64_u32 v[2:3], s[8:9], v4, v10, 0
+; GISEL-NEXT:    v_mul_lo_u32 v5, v4, v10
+; GISEL-NEXT:    v_mad_u64_u32 v[1:2], vcc, v4, v8, v[1:2]
+; GISEL-NEXT:    v_addc_co_u32_e32 v3, vcc, v3, v5, vcc
+; GISEL-NEXT:  .LBB3_6: ; %Flow1
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB3_7: ; %Flow2
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[6:7], s[14:15]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_9
+; GISEL-NEXT:  ; %bb.8: ; %fp-to-i-if-then5
+; GISEL-NEXT:    v_cndmask_b32_e64 v1, 0, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v1, 1, v1
+; GISEL-NEXT:    v_cndmask_b32_e64 v0, 0, 1, s[4:5]
+; GISEL-NEXT:    v_lshlrev_b32_e32 v2, 1, v1
+; GISEL-NEXT:    v_or_b32_e32 v0, v0, v2
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 2, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 3, v1
+; GISEL-NEXT:    v_or_b32_e32 v2, v1, v2
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 4, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 5, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 6, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 7, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 8, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 9, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 10, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 11, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 12, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v14, 13, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v14
+; GISEL-NEXT:    v_lshlrev_b32_e32 v15, 14, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v16, 15, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v13, v14
+; GISEL-NEXT:    v_or3_b32 v0, v0, v15, v16
+; GISEL-NEXT:    v_lshlrev_b32_e32 v17, 16, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v18, 17, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v15, v16
+; GISEL-NEXT:    v_or3_b32 v0, v0, v17, v18
+; GISEL-NEXT:    v_lshlrev_b32_e32 v19, 18, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v20, 19, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v17, v18
+; GISEL-NEXT:    v_or3_b32 v0, v0, v19, v20
+; GISEL-NEXT:    v_lshlrev_b32_e32 v3, 20, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v4, 21, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v19, v20
+; GISEL-NEXT:    v_or3_b32 v0, v0, v3, v4
+; GISEL-NEXT:    v_lshlrev_b32_e32 v5, 22, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v6, 23, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v3, v4
+; GISEL-NEXT:    v_or3_b32 v0, v0, v5, v6
+; GISEL-NEXT:    v_lshlrev_b32_e32 v7, 24, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v8, 25, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v5, v6
+; GISEL-NEXT:    v_or3_b32 v0, v0, v7, v8
+; GISEL-NEXT:    v_lshlrev_b32_e32 v9, 26, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v10, 27, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v7, v8
+; GISEL-NEXT:    v_or3_b32 v0, v0, v9, v10
+; GISEL-NEXT:    v_lshlrev_b32_e32 v11, 28, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v12, 29, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v9, v10
+; GISEL-NEXT:    v_or3_b32 v0, v0, v11, v12
+; GISEL-NEXT:    v_lshlrev_b32_e32 v13, 30, v1
+; GISEL-NEXT:    v_lshlrev_b32_e32 v1, 31, v1
+; GISEL-NEXT:    v_or3_b32 v2, v2, v11, v12
+; GISEL-NEXT:    v_or3_b32 v0, v0, v13, v1
+; GISEL-NEXT:    v_or3_b32 v1, v2, v13, v1
+; GISEL-NEXT:    v_add_co_u32_e32 v3, vcc, 0x80000000, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v1
+; GISEL-NEXT:  .LBB3_9: ; %Flow3
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:  .LBB3_10: ; %fp-to-i-cleanup
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptoui float %x to i128
+  ret i128 %cvt
+}
+
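+; For f16 the generated code promotes to f32 and uses the native 32-bit
+; conversion, then sign-extends the result to 128 bits.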
+define i128 @fptosi_f16_to_i128(half %x) {
+; GCN-LABEL: fptosi_f16_to_i128:
+; GCN:       ; %bb.0:
+; GCN-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GCN-NEXT:    v_cvt_f32_f16_e32 v0, v0
+; GCN-NEXT:    v_cvt_i32_f32_e32 v0, v0
+; GCN-NEXT:    v_ashrrev_i32_e32 v1, 31, v0
+; GCN-NEXT:    v_mov_b32_e32 v2, v1
+; GCN-NEXT:    v_mov_b32_e32 v3, v1
+; GCN-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptosi half %x to i128
+  ret i128 %cvt
+}
+
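+; The unsigned f16 case also converts through f32; the upper 96 bits of
+; the result are zero.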
+define i128 @fptoui_f16_to_i128(half %x) {
+; GCN-LABEL: fptoui_f16_to_i128:
+; GCN:       ; %bb.0:
+; GCN-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GCN-NEXT:    v_cvt_f32_f16_e32 v0, v0
+; GCN-NEXT:    v_mov_b32_e32 v1, 0
+; GCN-NEXT:    v_mov_b32_e32 v2, 0
+; GCN-NEXT:    v_mov_b32_e32 v3, 0
+; GCN-NEXT:    v_cvt_u32_f32_e32 v0, v0
+; GCN-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = fptoui half %x to i128
+  ret i128 %cvt
+}
+
+; FIXME: ExpandLargeFpConvert asserts on bfloat
+; define i128 @fptosi_bf16_to_i128(bfloat %x) {
+;   %cvt = fptosi bfloat %x to i128
+;   ret i128 %cvt
+; }
+
+; define i128 @fptoui_bf16_to_i128(bfloat %x) {
+;   %cvt = fptoui bfloat %x to i128
+;   ret i128 %cvt
+; }

diff --git a/llvm/test/CodeGen/AMDGPU/itofp.i128.ll b/llvm/test/CodeGen/AMDGPU/itofp.i128.ll
new file mode 100644
index 00000000000000..e4e8d52addf847
--- /dev/null
+++ b/llvm/test/CodeGen/AMDGPU/itofp.i128.ll
@@ -0,0 +1,1802 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
+; RUN: llc -global-isel=0 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,SDAG %s
+; RUN: llc -global-isel=1 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,GISEL %s
+
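+; The itofp expansion takes the absolute value, counts leading zeros to
+; compute the exponent, rounds the significand, and reassembles sign,
+; exponent, and fraction.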
+define float @sitofp_i128_to_f32(i128 %x) {
+; SDAG-LABEL: sitofp_i128_to_f32:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_or_b32_e32 v5, v1, v3
+; SDAG-NEXT:    v_or_b32_e32 v4, v0, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v4, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB0_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ashrrev_i32_e32 v5, 31, v3
+; SDAG-NEXT:    v_xor_b32_e32 v0, v5, v0
+; SDAG-NEXT:    v_xor_b32_e32 v1, v5, v1
+; SDAG-NEXT:    v_sub_co_u32_e32 v0, vcc, v0, v5
+; SDAG-NEXT:    v_xor_b32_e32 v2, v5, v2
+; SDAG-NEXT:    v_subb_co_u32_e32 v1, vcc, v1, v5, vcc
+; SDAG-NEXT:    v_xor_b32_e32 v6, v5, v3
+; SDAG-NEXT:    v_subb_co_u32_e32 v4, vcc, v2, v5, vcc
+; SDAG-NEXT:    v_subb_co_u32_e32 v5, vcc, v6, v5, vcc
+; SDAG-NEXT:    v_ffbh_u32_e32 v2, v4
+; SDAG-NEXT:    v_add_u32_e32 v2, 32, v2
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v5
+; SDAG-NEXT:    v_min_u32_e32 v2, v2, v6
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v0
+; SDAG-NEXT:    v_add_u32_e32 v6, 32, v6
+; SDAG-NEXT:    v_ffbh_u32_e32 v7, v1
+; SDAG-NEXT:    v_min_u32_e32 v6, v6, v7
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_add_u32_e32 v6, 64, v6
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, v6, v2, vcc
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x7f, v9
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 25, v2
+; SDAG-NEXT:    ; implicit-def: $vgpr6
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v4, 0xffffff98, v9
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v4, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v4
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB0_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v8, 0x80, v9
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 25, v8
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v8
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v8
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    v_mov_b32_e32 v7, v1
+; SDAG-NEXT:    v_mov_b32_e32 v6, v0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB0_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v12, 0x66, v9
+; SDAG-NEXT:    v_sub_u32_e32 v10, 64, v12
+; SDAG-NEXT:    v_lshrrev_b64 v[6:7], v12, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[10:11], v10, v[4:5]
+; SDAG-NEXT:    v_sub_u32_e32 v13, 38, v9
+; SDAG-NEXT:    v_or_b32_e32 v11, v7, v11
+; SDAG-NEXT:    v_or_b32_e32 v10, v6, v10
+; SDAG-NEXT:    v_lshrrev_b64 v[6:7], v13, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; SDAG-NEXT:    v_add_u32_e32 v14, 26, v9
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, v7, v11, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v12
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, v6, v10, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[10:11], v13, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[12:13], v14, v[4:5]
+; SDAG-NEXT:    v_subrev_u32_e32 v9, 38, v9
+; SDAG-NEXT:    v_cndmask_b32_e64 v15, v6, v0, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v6, v13, v11
+; SDAG-NEXT:    v_or_b32_e32 v11, v12, v10
+; SDAG-NEXT:    v_lshlrev_b64 v[9:10], v9, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v14
+; SDAG-NEXT:    v_cndmask_b32_e64 v7, v7, v1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, v10, v6, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v14
+; SDAG-NEXT:    v_cndmask_b32_e64 v10, v6, v5, s[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[5:6], v14, v[0:1]
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, v9, v11, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v4, v9, v4, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, 0, v6, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, 0, v5, vcc
+; SDAG-NEXT:    v_or_b32_e32 v5, v6, v10
+; SDAG-NEXT:    v_or_b32_e32 v4, v9, v4
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v4, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v6, v15, v4
+; SDAG-NEXT:  .LBB0_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[6:7], 1, v[0:1]
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v0, 2, v6
+; SDAG-NEXT:    v_and_or_b32 v0, v0, 1, v6
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v7, vcc
+; SDAG-NEXT:    v_and_b32_e32 v4, 0x4000000, v0
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v4
+; SDAG-NEXT:    v_alignbit_b32 v6, v1, v0, 2
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_alignbit_b32 v6, v1, v0, 3
+; SDAG-NEXT:    v_mov_b32_e32 v2, v8
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB0_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x80000000, v3
+; SDAG-NEXT:    v_lshl_add_u32 v1, v2, 23, 1.0
+; SDAG-NEXT:    v_and_b32_e32 v2, 0x7fffff, v6
+; SDAG-NEXT:    v_or3_b32 v4, v2, v0, v1
+; SDAG-NEXT:  .LBB0_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: sitofp_i128_to_f32:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_or_b32_e32 v4, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v5, v1, v3
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    s_mov_b32 s4, 0
+; GISEL-NEXT:    v_mov_b32_e32 v4, s4
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB0_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ashrrev_i32_e32 v8, 31, v3
+; GISEL-NEXT:    v_xor_b32_e32 v0, v8, v0
+; GISEL-NEXT:    v_xor_b32_e32 v1, v8, v1
+; GISEL-NEXT:    v_sub_co_u32_e32 v0, vcc, v0, v8
+; GISEL-NEXT:    v_xor_b32_e32 v2, v8, v2
+; GISEL-NEXT:    v_subb_co_u32_e32 v1, vcc, v1, v8, vcc
+; GISEL-NEXT:    v_xor_b32_e32 v3, v8, v3
+; GISEL-NEXT:    v_subb_co_u32_e32 v6, vcc, v2, v8, vcc
+; GISEL-NEXT:    v_subb_co_u32_e32 v7, vcc, v3, v8, vcc
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v0
+; GISEL-NEXT:    v_ffbh_u32_e32 v2, v1
+; GISEL-NEXT:    v_add_u32_e32 v3, 32, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v4, v6
+; GISEL-NEXT:    v_min_u32_e32 v2, v2, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v7
+; GISEL-NEXT:    v_add_u32_e32 v4, 32, v4
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[6:7]
+; GISEL-NEXT:    v_add_u32_e32 v2, 64, v2
+; GISEL-NEXT:    v_min_u32_e32 v3, v3, v4
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, v3, v2, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v9, 0x7f, v11
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 24, v9
+; GISEL-NEXT:    ; implicit-def: $vgpr2
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffff98, v11
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v0, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr11
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v10, 0x80, v11
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 26, v10
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v10
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v10
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v5, v3
+; GISEL-NEXT:    v_mov_b32_e32 v4, v2
+; GISEL-NEXT:    v_mov_b32_e32 v3, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB0_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v12, 0x66, v11
+; GISEL-NEXT:    v_sub_u32_e32 v4, 64, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v12, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v4, v[6:7]
+; GISEL-NEXT:    v_subrev_u32_e32 v13, 64, v12
+; GISEL-NEXT:    v_or_b32_e32 v4, v2, v4
+; GISEL-NEXT:    v_or_b32_e32 v5, v3, v5
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v13, v[6:7]
+; GISEL-NEXT:    v_add_u32_e32 v13, 26, v11
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; GISEL-NEXT:    v_sub_u32_e32 v11, 64, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v4, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v5, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v13, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[11:12], v11, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v14, 64, v13
+; GISEL-NEXT:    v_or_b32_e32 v15, v4, v11
+; GISEL-NEXT:    v_or_b32_e32 v16, v5, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[11:12], v14, -1
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v1, vcc
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, v11, v15, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, v12, v16, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, 0, v4, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, 0, v5, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v11, v11, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v12, v12, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v4, v4, v6
+; GISEL-NEXT:    v_and_b32_e32 v5, v5, v7
+; GISEL-NEXT:    v_and_or_b32 v4, v11, v0, v4
+; GISEL-NEXT:    v_and_or_b32 v5, v12, v1, v5
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v4, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v2, v2, v4
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB0_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[2:3], 1, v[0:1]
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v2, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v2, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v3, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 2, v[0:1]
+; GISEL-NEXT:    v_and_b32_e32 v3, 0x4000000, v0
+; GISEL-NEXT:    v_mov_b32_e32 v4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[3:4]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 3, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v9, v10
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB0_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_and_b32_e32 v0, 0x80000000, v8
+; GISEL-NEXT:    v_lshl_add_u32 v1, v9, 23, 1.0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0x7fffff, v2
+; GISEL-NEXT:    v_or3_b32 v4, v2, v0, v1
+; GISEL-NEXT:  .LBB0_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    v_mov_b32_e32 v0, v4
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = sitofp i128 %x to float
+  ret float %cvt
+}
+
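+; The unsigned variant skips the absolute-value step; the rest of the
+; expansion matches the signed case.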
+define float @uitofp_i128_to_f32(i128 %x) {
+; SDAG-LABEL: uitofp_i128_to_f32:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_or_b32_e32 v5, v1, v3
+; SDAG-NEXT:    v_or_b32_e32 v4, v0, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v4, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB1_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ffbh_u32_e32 v4, v2
+; SDAG-NEXT:    v_add_u32_e32 v4, 32, v4
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v3
+; SDAG-NEXT:    v_min_u32_e32 v4, v4, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v0
+; SDAG-NEXT:    v_add_u32_e32 v5, 32, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v1
+; SDAG-NEXT:    v_min_u32_e32 v5, v5, v6
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_add_u32_e32 v5, 64, v5
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, v5, v4, vcc
+; SDAG-NEXT:    v_sub_u32_e32 v6, 0x7f, v8
+; SDAG-NEXT:    v_mov_b32_e32 v5, v1
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 25, v6
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xffffff98, v8
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB1_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v7, 0x80, v8
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 25, v7
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v7
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v7
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB1_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v11, 0x66, v8
+; SDAG-NEXT:    v_sub_u32_e32 v9, 64, v11
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v11, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[9:10], v9, v[2:3]
+; SDAG-NEXT:    v_sub_u32_e32 v12, 38, v8
+; SDAG-NEXT:    v_or_b32_e32 v10, v5, v10
+; SDAG-NEXT:    v_or_b32_e32 v9, v4, v9
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v12, v[2:3]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v11
+; SDAG-NEXT:    v_add_u32_e32 v13, 26, v8
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, v5, v10, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v11
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, v4, v9, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[9:10], v12, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[11:12], v13, v[2:3]
+; SDAG-NEXT:    v_subrev_u32_e32 v8, 38, v8
+; SDAG-NEXT:    v_cndmask_b32_e64 v14, v4, v0, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v4, v12, v10
+; SDAG-NEXT:    v_or_b32_e32 v10, v11, v9
+; SDAG-NEXT:    v_lshlrev_b64 v[8:9], v8, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v13
+; SDAG-NEXT:    v_cndmask_b32_e64 v5, v5, v1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, v9, v4, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v13
+; SDAG-NEXT:    v_cndmask_b32_e64 v9, v4, v3, s[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[3:4], v13, v[0:1]
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, v8, v10, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, v8, v2, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, 0, v4, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, 0, v3, vcc
+; SDAG-NEXT:    v_or_b32_e32 v3, v4, v9
+; SDAG-NEXT:    v_or_b32_e32 v2, v8, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v4, v14, v2
+; SDAG-NEXT:  .LBB1_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[4:5], 1, v[0:1]
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v0, 2, v4
+; SDAG-NEXT:    v_and_or_b32 v0, v0, 1, v4
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v5, vcc
+; SDAG-NEXT:    v_and_b32_e32 v2, 0x4000000, v0
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v2
+; SDAG-NEXT:    v_alignbit_b32 v9, v1, v0, 2
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_alignbit_b32 v9, v1, v0, 3
+; SDAG-NEXT:    v_mov_b32_e32 v6, v7
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB1_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x7fffff, v9
+; SDAG-NEXT:    v_lshl_or_b32 v0, v6, 23, v0
+; SDAG-NEXT:    v_add_u32_e32 v4, 1.0, v0
+; SDAG-NEXT:  .LBB1_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: uitofp_i128_to_f32:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v4, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, v3
+; GISEL-NEXT:    v_or_b32_e32 v2, v0, v4
+; GISEL-NEXT:    v_or_b32_e32 v3, v1, v5
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    s_mov_b32 s4, 0
+; GISEL-NEXT:    v_mov_b32_e32 v2, s4
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB1_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v0
+; GISEL-NEXT:    v_ffbh_u32_e32 v2, v1
+; GISEL-NEXT:    v_add_u32_e32 v3, 32, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v6, v4
+; GISEL-NEXT:    v_min_u32_e32 v2, v2, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v5
+; GISEL-NEXT:    v_add_u32_e32 v6, 32, v6
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    v_add_u32_e32 v2, 64, v2
+; GISEL-NEXT:    v_min_u32_e32 v3, v3, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, v3, v2, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v10, 0x7f, v12
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 24, v10
+; GISEL-NEXT:    ; implicit-def: $vgpr2
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffff98, v12
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v0, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr12
+; GISEL-NEXT:    ; implicit-def: $vgpr4
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v11, 0x80, v12
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 26, v11
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v11
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v11
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v9, v3
+; GISEL-NEXT:    v_mov_b32_e32 v7, v1
+; GISEL-NEXT:    v_mov_b32_e32 v6, v0
+; GISEL-NEXT:    v_mov_b32_e32 v8, v2
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB1_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v8, 0x66, v12
+; GISEL-NEXT:    v_sub_u32_e32 v6, 64, v8
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v8, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], v6, v[4:5]
+; GISEL-NEXT:    v_subrev_u32_e32 v9, 64, v8
+; GISEL-NEXT:    v_or_b32_e32 v6, v2, v6
+; GISEL-NEXT:    v_or_b32_e32 v7, v3, v7
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v9, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v8
+; GISEL-NEXT:    v_add_u32_e32 v12, 26, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v6, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v7, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v8
+; GISEL-NEXT:    v_sub_u32_e32 v8, 64, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v7, v3, v1, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v12, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[8:9], v8, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v13, 64, v12
+; GISEL-NEXT:    v_or_b32_e32 v14, v2, v8
+; GISEL-NEXT:    v_or_b32_e32 v15, v3, v9
+; GISEL-NEXT:    v_lshrrev_b64 v[8:9], v13, -1
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v8, v8, v14, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v9, v9, v15, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, 0, v3, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v8, v8, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v9, v9, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v2, v2, v4
+; GISEL-NEXT:    v_and_b32_e32 v3, v3, v5
+; GISEL-NEXT:    v_and_or_b32 v2, v8, v0, v2
+; GISEL-NEXT:    v_and_or_b32 v3, v9, v1, v3
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v2, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v6, v6, v2
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB1_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], 1, v[0:1]
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v6, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v6, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v7, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 2, v[0:1]
+; GISEL-NEXT:    v_and_b32_e32 v3, 0x4000000, v0
+; GISEL-NEXT:    v_mov_b32_e32 v4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[3:4]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 3, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v10, v11
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB1_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_lshl_add_u32 v0, v10, 23, 1.0
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0x7fffff
+; GISEL-NEXT:    v_and_or_b32 v2, v2, v1, v0
+; GISEL-NEXT:  .LBB1_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    v_mov_b32_e32 v0, v2
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = uitofp i128 %x to float
+  ret float %cvt
+}
+
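+; Conversion to f64 follows the same structure with wider thresholds for
+; the 53-bit significand, assembling the result across two registers.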
+define double @sitofp_i128_to_f64(i128 %x) {
+; SDAG-LABEL: sitofp_i128_to_f64:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_mov_b32_e32 v5, v1
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    v_or_b32_e32 v1, v5, v3
+; SDAG-NEXT:    v_or_b32_e32 v0, v4, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[0:1]
+; SDAG-NEXT:    v_mov_b32_e32 v0, 0
+; SDAG-NEXT:    v_mov_b32_e32 v1, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB2_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ashrrev_i32_e32 v0, 31, v3
+; SDAG-NEXT:    v_xor_b32_e32 v4, v0, v4
+; SDAG-NEXT:    v_xor_b32_e32 v5, v0, v5
+; SDAG-NEXT:    v_sub_co_u32_e32 v4, vcc, v4, v0
+; SDAG-NEXT:    v_xor_b32_e32 v2, v0, v2
+; SDAG-NEXT:    v_subb_co_u32_e32 v5, vcc, v5, v0, vcc
+; SDAG-NEXT:    v_xor_b32_e32 v1, v0, v3
+; SDAG-NEXT:    v_subb_co_u32_e32 v6, vcc, v2, v0, vcc
+; SDAG-NEXT:    v_subb_co_u32_e32 v7, vcc, v1, v0, vcc
+; SDAG-NEXT:    v_ffbh_u32_e32 v0, v6
+; SDAG-NEXT:    v_add_u32_e32 v0, 32, v0
+; SDAG-NEXT:    v_ffbh_u32_e32 v1, v7
+; SDAG-NEXT:    v_min_u32_e32 v0, v0, v1
+; SDAG-NEXT:    v_ffbh_u32_e32 v1, v4
+; SDAG-NEXT:    v_add_u32_e32 v1, 32, v1
+; SDAG-NEXT:    v_ffbh_u32_e32 v2, v5
+; SDAG-NEXT:    v_min_u32_e32 v1, v1, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[6:7]
+; SDAG-NEXT:    v_add_u32_e32 v1, 64, v1
+; SDAG-NEXT:    v_cndmask_b32_e32 v11, v1, v0, vcc
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x7f, v11
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 54, v2
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v6, 0xffffffb5, v11
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v6, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v6
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, 0, v1, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr6_vgpr7
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:    ; implicit-def: $vgpr11
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB2_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v10, 0x80, v11
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 54, v10
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 55, v10
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 54, v10
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    v_mov_b32_e32 v9, v7
+; SDAG-NEXT:    v_mov_b32_e32 v1, v5
+; SDAG-NEXT:    v_mov_b32_e32 v8, v6
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB2_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v12, 0x49, v11
+; SDAG-NEXT:    v_sub_u32_e32 v8, 64, v12
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v12, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[8:9], v8, v[6:7]
+; SDAG-NEXT:    v_sub_u32_e32 v13, 9, v11
+; SDAG-NEXT:    v_or_b32_e32 v9, v1, v9
+; SDAG-NEXT:    v_or_b32_e32 v8, v0, v8
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], v13, v[6:7]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; SDAG-NEXT:    v_cndmask_b32_e32 v1, v1, v9, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v0, v0, v8, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[8:9], v12, v[6:7]
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v12
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, 0, v9, vcc
+; SDAG-NEXT:    v_add_u32_e32 v9, 55, v11
+; SDAG-NEXT:    v_lshrrev_b64 v[12:13], v13, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[14:15], v9, v[6:7]
+; SDAG-NEXT:    v_add_u32_e32 v11, -9, v11
+; SDAG-NEXT:    v_or_b32_e32 v14, v14, v12
+; SDAG-NEXT:    v_lshlrev_b64 v[11:12], v11, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, 0, v8, vcc
+; SDAG-NEXT:    v_or_b32_e32 v13, v15, v13
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v9
+; SDAG-NEXT:    v_cndmask_b32_e64 v1, v1, v5, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v0, v0, v4, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, v12, v13, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v9
+; SDAG-NEXT:    v_cndmask_b32_e64 v13, v12, v7, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v14, v11, v14, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[11:12], v9, v[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e64 v9, v14, v6, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v12, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v11, 0, v11, vcc
+; SDAG-NEXT:    v_or_b32_e32 v12, v12, v13
+; SDAG-NEXT:    v_or_b32_e32 v11, v11, v9
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[11:12]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v9, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v0, v0, v9
+; SDAG-NEXT:  .LBB2_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[8:9], 1, v[6:7]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v6, 31, v5
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], 1, v[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v8, v8, v6
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v4, 2, v0
+; SDAG-NEXT:    v_and_or_b32 v0, v4, 1, v0
+; SDAG-NEXT:    v_add_co_u32_e32 v4, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v5, vcc, 0, v1, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v6, vcc, 0, v8, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], 2, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b32_e32 v7, 30, v6
+; SDAG-NEXT:    v_or_b32_e32 v8, v1, v7
+; SDAG-NEXT:    v_and_b32_e32 v1, 0x800000, v5
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v1
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_lshrrev_b64 v[0:1], 3, v[4:5]
+; SDAG-NEXT:    v_lshlrev_b32_e32 v2, 29, v6
+; SDAG-NEXT:    v_or_b32_e32 v8, v1, v2
+; SDAG-NEXT:    v_mov_b32_e32 v2, v10
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB2_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v1, 0x80000000, v3
+; SDAG-NEXT:    v_mov_b32_e32 v3, 0x3ff00000
+; SDAG-NEXT:    v_lshl_add_u32 v2, v2, 20, v3
+; SDAG-NEXT:    v_and_b32_e32 v3, 0xfffff, v8
+; SDAG-NEXT:    v_or3_b32 v1, v3, v1, v2
+; SDAG-NEXT:  .LBB2_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: sitofp_i128_to_f64:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    v_mov_b32_e32 v5, v1
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_or_b32_e32 v0, v4, v2
+; GISEL-NEXT:    v_or_b32_e32 v1, v5, v3
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v0, s4
+; GISEL-NEXT:    v_mov_b32_e32 v1, s5
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB2_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ashrrev_i32_e32 v8, 31, v3
+; GISEL-NEXT:    v_xor_b32_e32 v0, v8, v4
+; GISEL-NEXT:    v_xor_b32_e32 v1, v8, v5
+; GISEL-NEXT:    v_sub_co_u32_e32 v0, vcc, v0, v8
+; GISEL-NEXT:    v_xor_b32_e32 v2, v8, v2
+; GISEL-NEXT:    v_subb_co_u32_e32 v1, vcc, v1, v8, vcc
+; GISEL-NEXT:    v_xor_b32_e32 v3, v8, v3
+; GISEL-NEXT:    v_subb_co_u32_e32 v2, vcc, v2, v8, vcc
+; GISEL-NEXT:    v_ffbh_u32_e32 v5, v0
+; GISEL-NEXT:    v_subb_co_u32_e32 v3, vcc, v3, v8, vcc
+; GISEL-NEXT:    v_ffbh_u32_e32 v4, v1
+; GISEL-NEXT:    v_add_u32_e32 v5, 32, v5
+; GISEL-NEXT:    v_ffbh_u32_e32 v6, v2
+; GISEL-NEXT:    v_min_u32_e32 v4, v4, v5
+; GISEL-NEXT:    v_ffbh_u32_e32 v5, v3
+; GISEL-NEXT:    v_add_u32_e32 v6, 32, v6
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    v_add_u32_e32 v4, 64, v4
+; GISEL-NEXT:    v_min_u32_e32 v5, v5, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, v5, v4, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v9, 0x7f, v11
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 53, v9
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffffb5, v11
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, 0, v1, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr11
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v10, 0x80, v11
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 55, v10
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 55, v10
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 54, v10
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v7, v3
+; GISEL-NEXT:    v_mov_b32_e32 v6, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, v1
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB2_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v14, 0x49, v11
+; GISEL-NEXT:    v_sub_u32_e32 v6, 64, v14
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v14, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], v6, v[2:3]
+; GISEL-NEXT:    v_subrev_u32_e32 v15, 64, v14
+; GISEL-NEXT:    v_or_b32_e32 v6, v4, v6
+; GISEL-NEXT:    v_or_b32_e32 v7, v5, v7
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v15, v[2:3]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v14
+; GISEL-NEXT:    v_lshrrev_b64 v[12:13], v14, v[2:3]
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, v5, v7, vcc
+; GISEL-NEXT:    v_add_u32_e32 v7, 55, v11
+; GISEL-NEXT:    v_sub_u32_e32 v13, 64, v7
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v4, v6, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v14
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, 0, v12, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[11:12], v7, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[13:14], v13, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v15, 64, v7
+; GISEL-NEXT:    v_or_b32_e32 v16, v11, v13
+; GISEL-NEXT:    v_or_b32_e32 v17, v12, v14
+; GISEL-NEXT:    v_lshrrev_b64 v[13:14], v15, -1
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v7
+; GISEL-NEXT:    v_cndmask_b32_e64 v4, v4, v0, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v5, v5, v1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e32 v13, v13, v16, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v14, v14, v17, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v7
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v11, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, 0, v12, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v7, v13, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v14, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v11, v11, v2
+; GISEL-NEXT:    v_and_b32_e32 v12, v12, v3
+; GISEL-NEXT:    v_and_or_b32 v11, v7, v0, v11
+; GISEL-NEXT:    v_and_or_b32 v12, v13, v1, v12
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[11:12]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v7, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v4, v4, v7
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB2_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[2:3], 1, v[2:3]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], 1, v[0:1]
+; GISEL-NEXT:    v_lshrrev_b32_e32 v0, 31, v1
+; GISEL-NEXT:    v_or_b32_e32 v6, v2, v0
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v4, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v4, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v5, vcc
+; GISEL-NEXT:    v_addc_co_u32_e32 v2, vcc, 0, v6, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], 2, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v6, 0
+; GISEL-NEXT:    v_and_b32_e32 v7, 0x800000, v1
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[6:7]
+; GISEL-NEXT:    v_lshl_or_b32 v6, v2, 30, v5
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], 3, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v9, v10
+; GISEL-NEXT:    v_lshl_or_b32 v6, v2, 29, v5
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB2_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_and_b32_e32 v0, 0x80000000, v8
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0x3ff00000
+; GISEL-NEXT:    v_mov_b32_e32 v2, 0xfffff
+; GISEL-NEXT:    v_lshl_add_u32 v1, v9, 20, v1
+; GISEL-NEXT:    v_and_or_b32 v2, v6, v2, v0
+; GISEL-NEXT:    v_and_or_b32 v0, v4, -1, 0
+; GISEL-NEXT:    v_or3_b32 v1, v2, v1, 0
+; GISEL-NEXT:  .LBB2_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = sitofp i128 %x to double
+  ret double %cvt
+}
+
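+; Unsigned i128 to f64: no sign handling, otherwise the same expansion.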
+define double @uitofp_i128_to_f64(i128 %x) {
+; SDAG-LABEL: uitofp_i128_to_f64:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_or_b32_e32 v5, v1, v3
+; SDAG-NEXT:    v_or_b32_e32 v4, v0, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v4, 0
+; SDAG-NEXT:    v_mov_b32_e32 v5, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB3_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ffbh_u32_e32 v4, v2
+; SDAG-NEXT:    v_add_u32_e32 v4, 32, v4
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v3
+; SDAG-NEXT:    v_min_u32_e32 v4, v4, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v0
+; SDAG-NEXT:    v_add_u32_e32 v5, 32, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v1
+; SDAG-NEXT:    v_min_u32_e32 v5, v5, v6
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_add_u32_e32 v5, 64, v5
+; SDAG-NEXT:    v_cndmask_b32_e32 v11, v5, v4, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:    v_sub_u32_e32 v9, 0x7f, v11
+; SDAG-NEXT:    v_mov_b32_e32 v6, v1
+; SDAG-NEXT:    v_mov_b32_e32 v8, v3
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 54, v9
+; SDAG-NEXT:    v_mov_b32_e32 v5, v0
+; SDAG-NEXT:    v_mov_b32_e32 v7, v2
+; SDAG-NEXT:    ; implicit-def: $vgpr12
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xffffffb5, v11
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, 0, v1, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr11
+; SDAG-NEXT:    ; implicit-def: $vgpr5_vgpr6
+; SDAG-NEXT:    ; implicit-def: $vgpr7_vgpr8
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB3_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v10, 0x80, v11
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 54, v10
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 55, v10
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 54, v10
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:    ; implicit-def: $vgpr5_vgpr6
+; SDAG-NEXT:    ; implicit-def: $vgpr7_vgpr8
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB3_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v8, 0x49, v11
+; SDAG-NEXT:    v_sub_u32_e32 v6, 64, v8
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v8, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[6:7], v6, v[2:3]
+; SDAG-NEXT:    v_sub_u32_e32 v13, 9, v11
+; SDAG-NEXT:    v_or_b32_e32 v7, v5, v7
+; SDAG-NEXT:    v_or_b32_e32 v12, v4, v6
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v13, v[2:3]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v8
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, v5, v7, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v8
+; SDAG-NEXT:    v_cndmask_b32_e64 v6, v5, v1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, v4, v12, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v8, v[2:3]
+; SDAG-NEXT:    v_add_u32_e32 v16, 55, v11
+; SDAG-NEXT:    v_cndmask_b32_e64 v8, v7, v0, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, 0, v5, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[12:13], v13, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[14:15], v16, v[2:3]
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, 0, v4, vcc
+; SDAG-NEXT:    v_add_u32_e32 v4, -9, v11
+; SDAG-NEXT:    v_lshlrev_b64 v[4:5], v4, v[0:1]
+; SDAG-NEXT:    v_or_b32_e32 v13, v15, v13
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v16
+; SDAG-NEXT:    v_or_b32_e32 v12, v14, v12
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, v5, v13, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v16
+; SDAG-NEXT:    v_cndmask_b32_e64 v11, v5, v3, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v12, v4, v12, vcc
+; SDAG-NEXT:    v_lshlrev_b64 v[4:5], v16, v[0:1]
+; SDAG-NEXT:    v_cndmask_b32_e64 v12, v12, v2, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, 0, v5, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, 0, v4, vcc
+; SDAG-NEXT:    v_or_b32_e32 v5, v5, v11
+; SDAG-NEXT:    v_or_b32_e32 v4, v4, v12
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v4, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v5, v8, v4
+; SDAG-NEXT:  .LBB3_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[7:8], 1, v[2:3]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v2, 31, v1
+; SDAG-NEXT:    v_lshlrev_b64 v[5:6], 1, v[0:1]
+; SDAG-NEXT:    v_or_b32_e32 v7, v7, v2
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v0, 2, v5
+; SDAG-NEXT:    v_and_or_b32 v0, v0, 1, v5
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v6, vcc
+; SDAG-NEXT:    v_addc_co_u32_e32 v2, vcc, 0, v7, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], 2, v[0:1]
+; SDAG-NEXT:    v_and_b32_e32 v3, 0x800000, v1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v3
+; SDAG-NEXT:    v_alignbit_b32 v12, v2, v1, 2
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], 3, v[0:1]
+; SDAG-NEXT:    v_alignbit_b32 v12, v2, v1, 3
+; SDAG-NEXT:    v_mov_b32_e32 v9, v10
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB3_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v0, 0xfffff, v12
+; SDAG-NEXT:    v_lshl_or_b32 v0, v9, 20, v0
+; SDAG-NEXT:    v_add_u32_e32 v5, 0x3ff00000, v0
+; SDAG-NEXT:  .LBB3_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    v_mov_b32_e32 v1, v5
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: uitofp_i128_to_f64:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    v_or_b32_e32 v4, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v5, v1, v3
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v4, s4
+; GISEL-NEXT:    v_mov_b32_e32 v5, s5
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB3_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ffbh_u32_e32 v5, v0
+; GISEL-NEXT:    v_ffbh_u32_e32 v4, v1
+; GISEL-NEXT:    v_add_u32_e32 v5, 32, v5
+; GISEL-NEXT:    v_ffbh_u32_e32 v6, v2
+; GISEL-NEXT:    v_min_u32_e32 v4, v4, v5
+; GISEL-NEXT:    v_ffbh_u32_e32 v5, v3
+; GISEL-NEXT:    v_add_u32_e32 v6, 32, v6
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    v_add_u32_e32 v4, 64, v4
+; GISEL-NEXT:    v_min_u32_e32 v5, v5, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v10, v5, v4, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v8, 0x7f, v10
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 53, v8
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffffb5, v10
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, 0, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, 0, v1, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr10
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v9, 0x80, v10
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 55, v9
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 55, v9
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 54, v9
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v7, v3
+; GISEL-NEXT:    v_mov_b32_e32 v6, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, v1
+; GISEL-NEXT:    v_mov_b32_e32 v4, v0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB3_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v13, 0x49, v10
+; GISEL-NEXT:    v_sub_u32_e32 v6, 64, v13
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v13, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], v6, v[2:3]
+; GISEL-NEXT:    v_subrev_u32_e32 v14, 64, v13
+; GISEL-NEXT:    v_lshrrev_b64 v[11:12], v13, v[2:3]
+; GISEL-NEXT:    v_or_b32_e32 v6, v4, v6
+; GISEL-NEXT:    v_or_b32_e32 v7, v5, v7
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v14, v[2:3]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v13
+; GISEL-NEXT:    v_add_u32_e32 v14, 55, v10
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, v5, v7, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v7, 0, v12, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v12, 64, v14
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, v4, v6, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, 0, v11, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[10:11], v14, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[12:13], v12, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v15, 64, v14
+; GISEL-NEXT:    v_or_b32_e32 v16, v10, v12
+; GISEL-NEXT:    v_or_b32_e32 v17, v11, v13
+; GISEL-NEXT:    v_lshrrev_b64 v[12:13], v15, -1
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v14
+; GISEL-NEXT:    v_cndmask_b32_e64 v4, v4, v0, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v5, v5, v1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, v12, v16, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v13, v13, v17, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v14
+; GISEL-NEXT:    v_cndmask_b32_e32 v10, 0, v10, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, 0, v11, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v12, v12, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v13, v13, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v10, v10, v2
+; GISEL-NEXT:    v_and_b32_e32 v11, v11, v3
+; GISEL-NEXT:    v_and_or_b32 v10, v12, v0, v10
+; GISEL-NEXT:    v_and_or_b32 v11, v13, v1, v11
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[10:11]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v10, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v4, v4, v10
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB3_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], 1, v[2:3]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], 1, v[0:1]
+; GISEL-NEXT:    v_lshrrev_b32_e32 v0, 31, v1
+; GISEL-NEXT:    v_or_b32_e32 v6, v6, v0
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v4, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v4, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v5, vcc
+; GISEL-NEXT:    v_addc_co_u32_e32 v2, vcc, 0, v6, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], 2, v[0:1]
+; GISEL-NEXT:    v_addc_co_u32_e32 v3, vcc, 0, v7, vcc
+; GISEL-NEXT:    v_mov_b32_e32 v5, 0
+; GISEL-NEXT:    v_and_b32_e32 v6, 0x800000, v1
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[5:6]
+; GISEL-NEXT:    v_lshlrev_b64 v[5:6], 30, v[2:3]
+; GISEL-NEXT:    v_lshrrev_b32_e32 v6, 2, v1
+; GISEL-NEXT:    v_or_b32_e32 v6, v6, v5
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshlrev_b64 v[2:3], 29, v[2:3]
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], 3, v[0:1]
+; GISEL-NEXT:    v_lshrrev_b32_e32 v0, 3, v1
+; GISEL-NEXT:    v_or_b32_e32 v6, v0, v2
+; GISEL-NEXT:    v_mov_b32_e32 v8, v9
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB3_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_mov_b32_e32 v0, 0x3ff00000
+; GISEL-NEXT:    v_lshl_add_u32 v0, v8, 20, v0
+; GISEL-NEXT:    v_and_b32_e32 v1, 0xfffff, v6
+; GISEL-NEXT:    v_and_or_b32 v4, v4, -1, 0
+; GISEL-NEXT:    v_or3_b32 v5, v1, v0, 0
+; GISEL-NEXT:  .LBB3_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    v_mov_b32_e32 v0, v4
+; GISEL-NEXT:    v_mov_b32_e32 v1, v5
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = uitofp i128 %x to double
+  ret double %cvt
+}
+
+define half @sitofp_i128_to_f16(i128 %x) {
+; SDAG-LABEL: sitofp_i128_to_f16:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_or_b32_e32 v5, v1, v3
+; SDAG-NEXT:    v_or_b32_e32 v4, v0, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v4, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB4_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ashrrev_i32_e32 v5, 31, v3
+; SDAG-NEXT:    v_xor_b32_e32 v0, v5, v0
+; SDAG-NEXT:    v_xor_b32_e32 v1, v5, v1
+; SDAG-NEXT:    v_sub_co_u32_e32 v0, vcc, v0, v5
+; SDAG-NEXT:    v_xor_b32_e32 v2, v5, v2
+; SDAG-NEXT:    v_subb_co_u32_e32 v1, vcc, v1, v5, vcc
+; SDAG-NEXT:    v_xor_b32_e32 v6, v5, v3
+; SDAG-NEXT:    v_subb_co_u32_e32 v4, vcc, v2, v5, vcc
+; SDAG-NEXT:    v_subb_co_u32_e32 v5, vcc, v6, v5, vcc
+; SDAG-NEXT:    v_ffbh_u32_e32 v2, v4
+; SDAG-NEXT:    v_add_u32_e32 v2, 32, v2
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v5
+; SDAG-NEXT:    v_min_u32_e32 v2, v2, v6
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v0
+; SDAG-NEXT:    v_add_u32_e32 v6, 32, v6
+; SDAG-NEXT:    v_ffbh_u32_e32 v7, v1
+; SDAG-NEXT:    v_min_u32_e32 v6, v6, v7
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_add_u32_e32 v6, 64, v6
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, v6, v2, vcc
+; SDAG-NEXT:    v_sub_u32_e32 v2, 0x7f, v9
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 25, v2
+; SDAG-NEXT:    ; implicit-def: $vgpr6
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v4, 0xffffff98, v9
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v4, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v4
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB4_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v8, 0x80, v9
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 25, v8
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v8
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v8
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    v_mov_b32_e32 v7, v1
+; SDAG-NEXT:    v_mov_b32_e32 v6, v0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB4_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v12, 0x66, v9
+; SDAG-NEXT:    v_sub_u32_e32 v10, 64, v12
+; SDAG-NEXT:    v_lshrrev_b64 v[6:7], v12, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[10:11], v10, v[4:5]
+; SDAG-NEXT:    v_sub_u32_e32 v13, 38, v9
+; SDAG-NEXT:    v_or_b32_e32 v11, v7, v11
+; SDAG-NEXT:    v_or_b32_e32 v10, v6, v10
+; SDAG-NEXT:    v_lshrrev_b64 v[6:7], v13, v[4:5]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; SDAG-NEXT:    v_add_u32_e32 v14, 26, v9
+; SDAG-NEXT:    v_cndmask_b32_e32 v7, v7, v11, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v12
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, v6, v10, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[10:11], v13, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[12:13], v14, v[4:5]
+; SDAG-NEXT:    v_subrev_u32_e32 v9, 38, v9
+; SDAG-NEXT:    v_cndmask_b32_e64 v15, v6, v0, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v6, v13, v11
+; SDAG-NEXT:    v_or_b32_e32 v11, v12, v10
+; SDAG-NEXT:    v_lshlrev_b64 v[9:10], v9, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v14
+; SDAG-NEXT:    v_cndmask_b32_e64 v7, v7, v1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, v10, v6, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v14
+; SDAG-NEXT:    v_cndmask_b32_e64 v10, v6, v5, s[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[5:6], v14, v[0:1]
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, v9, v11, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v4, v9, v4, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v6, 0, v6, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, 0, v5, vcc
+; SDAG-NEXT:    v_or_b32_e32 v5, v6, v10
+; SDAG-NEXT:    v_or_b32_e32 v4, v9, v4
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v4, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v6, v15, v4
+; SDAG-NEXT:  .LBB4_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[6:7], 1, v[0:1]
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v0, 2, v6
+; SDAG-NEXT:    v_and_or_b32 v0, v0, 1, v6
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v7, vcc
+; SDAG-NEXT:    v_and_b32_e32 v4, 0x4000000, v0
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v4
+; SDAG-NEXT:    v_alignbit_b32 v6, v1, v0, 2
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_alignbit_b32 v6, v1, v0, 3
+; SDAG-NEXT:    v_mov_b32_e32 v2, v8
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB4_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x80000000, v3
+; SDAG-NEXT:    v_lshl_add_u32 v1, v2, 23, 1.0
+; SDAG-NEXT:    v_and_b32_e32 v2, 0x7fffff, v6
+; SDAG-NEXT:    v_or3_b32 v0, v2, v0, v1
+; SDAG-NEXT:    v_cvt_f16_f32_e32 v4, v0
+; SDAG-NEXT:  .LBB4_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: sitofp_i128_to_f16:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_or_b32_e32 v4, v0, v2
+; GISEL-NEXT:    v_or_b32_e32 v5, v1, v3
+; GISEL-NEXT:    s_mov_b32 s4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    v_mov_b32_e32 v4, s4
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB4_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ashrrev_i32_e32 v8, 31, v3
+; GISEL-NEXT:    v_xor_b32_e32 v0, v8, v0
+; GISEL-NEXT:    v_xor_b32_e32 v1, v8, v1
+; GISEL-NEXT:    v_sub_co_u32_e32 v0, vcc, v0, v8
+; GISEL-NEXT:    v_xor_b32_e32 v2, v8, v2
+; GISEL-NEXT:    v_subb_co_u32_e32 v1, vcc, v1, v8, vcc
+; GISEL-NEXT:    v_xor_b32_e32 v3, v8, v3
+; GISEL-NEXT:    v_subb_co_u32_e32 v6, vcc, v2, v8, vcc
+; GISEL-NEXT:    v_subb_co_u32_e32 v7, vcc, v3, v8, vcc
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v0
+; GISEL-NEXT:    v_ffbh_u32_e32 v2, v1
+; GISEL-NEXT:    v_add_u32_e32 v3, 32, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v4, v6
+; GISEL-NEXT:    v_min_u32_e32 v2, v2, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v7
+; GISEL-NEXT:    v_add_u32_e32 v4, 32, v4
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[6:7]
+; GISEL-NEXT:    v_add_u32_e32 v2, 64, v2
+; GISEL-NEXT:    v_min_u32_e32 v3, v3, v4
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, v3, v2, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v9, 0x7f, v11
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 24, v9
+; GISEL-NEXT:    ; implicit-def: $vgpr2
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffff98, v11
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v0, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr11
+; GISEL-NEXT:    ; implicit-def: $vgpr6
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB4_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v10, 0x80, v11
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 26, v10
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v10
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v10
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v5, v3
+; GISEL-NEXT:    v_mov_b32_e32 v4, v2
+; GISEL-NEXT:    v_mov_b32_e32 v3, v1
+; GISEL-NEXT:    v_mov_b32_e32 v2, v0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB4_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v12, 0x66, v11
+; GISEL-NEXT:    v_sub_u32_e32 v4, 64, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v12, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[4:5], v4, v[6:7]
+; GISEL-NEXT:    v_subrev_u32_e32 v13, 64, v12
+; GISEL-NEXT:    v_or_b32_e32 v4, v2, v4
+; GISEL-NEXT:    v_or_b32_e32 v5, v3, v5
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v13, v[6:7]
+; GISEL-NEXT:    v_add_u32_e32 v13, 26, v11
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; GISEL-NEXT:    v_sub_u32_e32 v11, 64, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v4, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v5, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[4:5], v13, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[11:12], v11, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v14, 64, v13
+; GISEL-NEXT:    v_or_b32_e32 v15, v4, v11
+; GISEL-NEXT:    v_or_b32_e32 v16, v5, v12
+; GISEL-NEXT:    v_lshrrev_b64 v[11:12], v14, -1
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v1, vcc
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v11, v11, v15, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, v12, v16, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v13
+; GISEL-NEXT:    v_cndmask_b32_e32 v4, 0, v4, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v5, 0, v5, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v11, v11, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v12, v12, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v4, v4, v6
+; GISEL-NEXT:    v_and_b32_e32 v5, v5, v7
+; GISEL-NEXT:    v_and_or_b32 v4, v11, v0, v4
+; GISEL-NEXT:    v_and_or_b32 v5, v12, v1, v5
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v4, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v2, v2, v4
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB4_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[2:3], 1, v[0:1]
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v2, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v2, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v3, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 2, v[0:1]
+; GISEL-NEXT:    v_and_b32_e32 v3, 0x4000000, v0
+; GISEL-NEXT:    v_mov_b32_e32 v4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[3:4]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 3, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v9, v10
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB4_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_and_b32_e32 v0, 0x80000000, v8
+; GISEL-NEXT:    v_lshl_add_u32 v1, v9, 23, 1.0
+; GISEL-NEXT:    v_and_b32_e32 v2, 0x7fffff, v2
+; GISEL-NEXT:    v_or3_b32 v0, v2, v0, v1
+; GISEL-NEXT:    v_cvt_f16_f32_e32 v4, v0
+; GISEL-NEXT:  .LBB4_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    v_mov_b32_e32 v0, v4
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = sitofp i128 %x to half
+  ret half %cvt
+}
+
+define half @uitofp_i128_to_f16(i128 %x) {
+; SDAG-LABEL: uitofp_i128_to_f16:
+; SDAG:       ; %bb.0: ; %itofp-entry
+; SDAG-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; SDAG-NEXT:    v_or_b32_e32 v5, v1, v3
+; SDAG-NEXT:    v_or_b32_e32 v4, v0, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[4:5]
+; SDAG-NEXT:    v_mov_b32_e32 v4, 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; SDAG-NEXT:    s_cbranch_execz .LBB5_16
+; SDAG-NEXT:  ; %bb.1: ; %itofp-if-end
+; SDAG-NEXT:    v_ffbh_u32_e32 v4, v2
+; SDAG-NEXT:    v_add_u32_e32 v4, 32, v4
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v3
+; SDAG-NEXT:    v_min_u32_e32 v4, v4, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v5, v0
+; SDAG-NEXT:    v_add_u32_e32 v5, 32, v5
+; SDAG-NEXT:    v_ffbh_u32_e32 v6, v1
+; SDAG-NEXT:    v_min_u32_e32 v5, v5, v6
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    v_add_u32_e32 v5, 64, v5
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, v5, v4, vcc
+; SDAG-NEXT:    v_sub_u32_e32 v6, 0x7f, v8
+; SDAG-NEXT:    v_mov_b32_e32 v5, v1
+; SDAG-NEXT:    v_cmp_gt_i32_e32 vcc, 25, v6
+; SDAG-NEXT:    v_mov_b32_e32 v4, v0
+; SDAG-NEXT:    ; implicit-def: $vgpr9
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; SDAG-NEXT:  ; %bb.2: ; %itofp-if-else
+; SDAG-NEXT:    v_add_u32_e32 v2, 0xffffff98, v8
+; SDAG-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; SDAG-NEXT:    v_cndmask_b32_e32 v9, 0, v0, vcc
+; SDAG-NEXT:    ; implicit-def: $vgpr0_vgpr1
+; SDAG-NEXT:    ; implicit-def: $vgpr8
+; SDAG-NEXT:    ; implicit-def: $vgpr2_vgpr3
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.3: ; %Flow6
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; SDAG-NEXT:    s_cbranch_execz .LBB5_15
+; SDAG-NEXT:  ; %bb.4: ; %NodeBlock
+; SDAG-NEXT:    v_sub_u32_e32 v7, 0x80, v8
+; SDAG-NEXT:    v_cmp_lt_i32_e32 vcc, 25, v7
+; SDAG-NEXT:    s_mov_b64 s[10:11], 0
+; SDAG-NEXT:    s_mov_b64 s[4:5], 0
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:  ; %bb.5: ; %LeafBlock1
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v7
+; SDAG-NEXT:    s_and_b64 s[4:5], vcc, exec
+; SDAG-NEXT:  ; %bb.6: ; %Flow3
+; SDAG-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; SDAG-NEXT:  ; %bb.7: ; %LeafBlock
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v7
+; SDAG-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; SDAG-NEXT:    s_and_b64 s[14:15], vcc, exec
+; SDAG-NEXT:    s_mov_b64 s[10:11], exec
+; SDAG-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; SDAG-NEXT:    ; implicit-def: $vgpr4_vgpr5
+; SDAG-NEXT:  ; %bb.8: ; %Flow4
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; SDAG-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; SDAG-NEXT:    s_cbranch_execz .LBB5_10
+; SDAG-NEXT:  ; %bb.9: ; %itofp-sw-default
+; SDAG-NEXT:    v_sub_u32_e32 v11, 0x66, v8
+; SDAG-NEXT:    v_sub_u32_e32 v9, 64, v11
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v11, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[9:10], v9, v[2:3]
+; SDAG-NEXT:    v_sub_u32_e32 v12, 38, v8
+; SDAG-NEXT:    v_or_b32_e32 v10, v5, v10
+; SDAG-NEXT:    v_or_b32_e32 v9, v4, v9
+; SDAG-NEXT:    v_lshrrev_b64 v[4:5], v12, v[2:3]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v11
+; SDAG-NEXT:    v_add_u32_e32 v13, 26, v8
+; SDAG-NEXT:    v_cndmask_b32_e32 v5, v5, v10, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v11
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, v4, v9, vcc
+; SDAG-NEXT:    v_lshrrev_b64 v[9:10], v12, v[0:1]
+; SDAG-NEXT:    v_lshlrev_b64 v[11:12], v13, v[2:3]
+; SDAG-NEXT:    v_subrev_u32_e32 v8, 38, v8
+; SDAG-NEXT:    v_cndmask_b32_e64 v14, v4, v0, s[4:5]
+; SDAG-NEXT:    v_or_b32_e32 v4, v12, v10
+; SDAG-NEXT:    v_or_b32_e32 v10, v11, v9
+; SDAG-NEXT:    v_lshlrev_b64 v[8:9], v8, v[0:1]
+; SDAG-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v13
+; SDAG-NEXT:    v_cndmask_b32_e64 v5, v5, v1, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, v9, v4, vcc
+; SDAG-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v13
+; SDAG-NEXT:    v_cndmask_b32_e64 v9, v4, v3, s[4:5]
+; SDAG-NEXT:    v_lshlrev_b64 v[3:4], v13, v[0:1]
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, v8, v10, vcc
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, v8, v2, s[4:5]
+; SDAG-NEXT:    v_cndmask_b32_e32 v4, 0, v4, vcc
+; SDAG-NEXT:    v_cndmask_b32_e32 v8, 0, v3, vcc
+; SDAG-NEXT:    v_or_b32_e32 v3, v4, v9
+; SDAG-NEXT:    v_or_b32_e32 v2, v8, v2
+; SDAG-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; SDAG-NEXT:    s_andn2_b64 s[10:11], s[10:11], exec
+; SDAG-NEXT:    v_cndmask_b32_e64 v2, 0, 1, vcc
+; SDAG-NEXT:    v_or_b32_e32 v4, v14, v2
+; SDAG-NEXT:  .LBB5_10: ; %Flow5
+; SDAG-NEXT:    s_or_b64 exec, exec, s[12:13]
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; SDAG-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; SDAG-NEXT:    v_lshlrev_b64 v[4:5], 1, v[0:1]
+; SDAG-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:    v_lshrrev_b32_e32 v0, 2, v4
+; SDAG-NEXT:    v_and_or_b32 v0, v0, 1, v4
+; SDAG-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; SDAG-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v5, vcc
+; SDAG-NEXT:    v_and_b32_e32 v2, 0x4000000, v0
+; SDAG-NEXT:    v_cmp_ne_u32_e32 vcc, 0, v2
+; SDAG-NEXT:    v_alignbit_b32 v9, v1, v0, 2
+; SDAG-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; SDAG-NEXT:  ; %bb.13: ; %itofp-if-then20
+; SDAG-NEXT:    v_alignbit_b32 v9, v1, v0, 3
+; SDAG-NEXT:    v_mov_b32_e32 v6, v7
+; SDAG-NEXT:  ; %bb.14: ; %Flow
+; SDAG-NEXT:    s_or_b64 exec, exec, s[4:5]
+; SDAG-NEXT:  .LBB5_15: ; %Flow7
+; SDAG-NEXT:    s_or_b64 exec, exec, s[8:9]
+; SDAG-NEXT:    v_and_b32_e32 v0, 0x7fffff, v9
+; SDAG-NEXT:    v_lshl_or_b32 v0, v6, 23, v0
+; SDAG-NEXT:    v_add_u32_e32 v0, 1.0, v0
+; SDAG-NEXT:    v_cvt_f16_f32_e32 v4, v0
+; SDAG-NEXT:  .LBB5_16: ; %Flow8
+; SDAG-NEXT:    s_or_b64 exec, exec, s[6:7]
+; SDAG-NEXT:    v_mov_b32_e32 v0, v4
+; SDAG-NEXT:    s_setpc_b64 s[30:31]
+;
+; GISEL-LABEL: uitofp_i128_to_f16:
+; GISEL:       ; %bb.0: ; %itofp-entry
+; GISEL-NEXT:    s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
+; GISEL-NEXT:    v_mov_b32_e32 v4, v2
+; GISEL-NEXT:    v_mov_b32_e32 v5, v3
+; GISEL-NEXT:    v_or_b32_e32 v2, v0, v4
+; GISEL-NEXT:    v_or_b32_e32 v3, v1, v5
+; GISEL-NEXT:    s_mov_b32 s4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    v_mov_b32_e32 v2, s4
+; GISEL-NEXT:    s_and_saveexec_b64 s[6:7], vcc
+; GISEL-NEXT:    s_cbranch_execz .LBB5_16
+; GISEL-NEXT:  ; %bb.1: ; %itofp-if-end
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v0
+; GISEL-NEXT:    v_ffbh_u32_e32 v2, v1
+; GISEL-NEXT:    v_add_u32_e32 v3, 32, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v6, v4
+; GISEL-NEXT:    v_min_u32_e32 v2, v2, v3
+; GISEL-NEXT:    v_ffbh_u32_e32 v3, v5
+; GISEL-NEXT:    v_add_u32_e32 v6, 32, v6
+; GISEL-NEXT:    v_cmp_eq_u64_e32 vcc, 0, v[4:5]
+; GISEL-NEXT:    v_add_u32_e32 v2, 64, v2
+; GISEL-NEXT:    v_min_u32_e32 v3, v3, v6
+; GISEL-NEXT:    v_cndmask_b32_e32 v12, v3, v2, vcc
+; GISEL-NEXT:    v_sub_u32_e32 v10, 0x7f, v12
+; GISEL-NEXT:    v_cmp_ge_i32_e32 vcc, 24, v10
+; GISEL-NEXT:    ; implicit-def: $vgpr2
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:    s_xor_b64 s[4:5], exec, s[4:5]
+; GISEL-NEXT:  ; %bb.2: ; %itofp-if-else
+; GISEL-NEXT:    v_add_u32_e32 v2, 0xffffff98, v12
+; GISEL-NEXT:    v_lshlrev_b64 v[0:1], v2, v[0:1]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v2
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v0, vcc
+; GISEL-NEXT:    ; implicit-def: $vgpr0
+; GISEL-NEXT:    ; implicit-def: $vgpr12
+; GISEL-NEXT:    ; implicit-def: $vgpr4
+; GISEL-NEXT:  ; %bb.3: ; %Flow6
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[8:9], s[4:5]
+; GISEL-NEXT:    s_cbranch_execz .LBB5_15
+; GISEL-NEXT:  ; %bb.4: ; %NodeBlock
+; GISEL-NEXT:    v_sub_u32_e32 v11, 0x80, v12
+; GISEL-NEXT:    v_cmp_le_i32_e32 vcc, 26, v11
+; GISEL-NEXT:    s_mov_b64 s[10:11], 0
+; GISEL-NEXT:    s_mov_b64 s[4:5], 0
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], vcc
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:  ; %bb.5: ; %LeafBlock1
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 26, v11
+; GISEL-NEXT:    s_andn2_b64 s[4:5], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.6: ; %Flow3
+; GISEL-NEXT:    s_andn2_saveexec_b64 s[12:13], s[12:13]
+; GISEL-NEXT:  ; %bb.7: ; %LeafBlock
+; GISEL-NEXT:    v_cmp_ne_u32_e32 vcc, 25, v11
+; GISEL-NEXT:    s_andn2_b64 s[10:11], 0, exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, -1
+; GISEL-NEXT:    s_or_b64 s[10:11], s[10:11], s[14:15]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[4:5], exec
+; GISEL-NEXT:    s_and_b64 s[14:15], exec, vcc
+; GISEL-NEXT:    s_or_b64 s[4:5], s[4:5], s[14:15]
+; GISEL-NEXT:  ; %bb.8: ; %Flow4
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    v_mov_b32_e32 v9, v3
+; GISEL-NEXT:    v_mov_b32_e32 v7, v1
+; GISEL-NEXT:    v_mov_b32_e32 v6, v0
+; GISEL-NEXT:    v_mov_b32_e32 v8, v2
+; GISEL-NEXT:    s_and_saveexec_b64 s[12:13], s[4:5]
+; GISEL-NEXT:    s_xor_b64 s[12:13], exec, s[12:13]
+; GISEL-NEXT:    s_cbranch_execz .LBB5_10
+; GISEL-NEXT:  ; %bb.9: ; %itofp-sw-default
+; GISEL-NEXT:    v_sub_u32_e32 v8, 0x66, v12
+; GISEL-NEXT:    v_sub_u32_e32 v6, 64, v8
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v8, v[0:1]
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], v6, v[4:5]
+; GISEL-NEXT:    v_subrev_u32_e32 v9, 64, v8
+; GISEL-NEXT:    v_or_b32_e32 v6, v2, v6
+; GISEL-NEXT:    v_or_b32_e32 v7, v3, v7
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v9, v[4:5]
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v8
+; GISEL-NEXT:    v_add_u32_e32 v12, 26, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, v2, v6, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, v3, v7, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e32 vcc, 0, v8
+; GISEL-NEXT:    v_sub_u32_e32 v8, 64, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v6, v2, v0, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v7, v3, v1, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], v12, -1
+; GISEL-NEXT:    v_lshlrev_b64 v[8:9], v8, -1
+; GISEL-NEXT:    v_subrev_u32_e32 v13, 64, v12
+; GISEL-NEXT:    v_or_b32_e32 v14, v2, v8
+; GISEL-NEXT:    v_or_b32_e32 v15, v3, v9
+; GISEL-NEXT:    v_lshrrev_b64 v[8:9], v13, -1
+; GISEL-NEXT:    v_cmp_gt_u32_e32 vcc, 64, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v8, v8, v14, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v9, v9, v15, vcc
+; GISEL-NEXT:    v_cmp_eq_u32_e64 s[4:5], 0, v12
+; GISEL-NEXT:    v_cndmask_b32_e32 v2, 0, v2, vcc
+; GISEL-NEXT:    v_cndmask_b32_e32 v3, 0, v3, vcc
+; GISEL-NEXT:    v_cndmask_b32_e64 v8, v8, -1, s[4:5]
+; GISEL-NEXT:    v_cndmask_b32_e64 v9, v9, -1, s[4:5]
+; GISEL-NEXT:    v_and_b32_e32 v2, v2, v4
+; GISEL-NEXT:    v_and_b32_e32 v3, v3, v5
+; GISEL-NEXT:    v_and_or_b32 v2, v8, v0, v2
+; GISEL-NEXT:    v_and_or_b32 v3, v9, v1, v3
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[2:3]
+; GISEL-NEXT:    s_andn2_b64 s[4:5], s[10:11], exec
+; GISEL-NEXT:    v_cndmask_b32_e64 v2, 0, 1, vcc
+; GISEL-NEXT:    s_and_b64 s[10:11], exec, 0
+; GISEL-NEXT:    v_or_b32_e32 v6, v6, v2
+; GISEL-NEXT:    s_or_b64 s[10:11], s[4:5], s[10:11]
+; GISEL-NEXT:  .LBB5_10: ; %Flow5
+; GISEL-NEXT:    s_or_b64 exec, exec, s[12:13]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], s[10:11]
+; GISEL-NEXT:  ; %bb.11: ; %itofp-sw-bb
+; GISEL-NEXT:    v_lshlrev_b64 v[6:7], 1, v[0:1]
+; GISEL-NEXT:  ; %bb.12: ; %itofp-sw-epilog
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:    v_bfe_u32 v0, v6, 2, 1
+; GISEL-NEXT:    v_or_b32_e32 v0, v6, v0
+; GISEL-NEXT:    v_add_co_u32_e32 v0, vcc, 1, v0
+; GISEL-NEXT:    v_addc_co_u32_e32 v1, vcc, 0, v7, vcc
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 2, v[0:1]
+; GISEL-NEXT:    v_and_b32_e32 v3, 0x4000000, v0
+; GISEL-NEXT:    v_mov_b32_e32 v4, 0
+; GISEL-NEXT:    v_cmp_ne_u64_e32 vcc, 0, v[3:4]
+; GISEL-NEXT:    s_and_saveexec_b64 s[4:5], vcc
+; GISEL-NEXT:  ; %bb.13: ; %itofp-if-then20
+; GISEL-NEXT:    v_lshrrev_b64 v[2:3], 3, v[0:1]
+; GISEL-NEXT:    v_mov_b32_e32 v10, v11
+; GISEL-NEXT:  ; %bb.14: ; %Flow
+; GISEL-NEXT:    s_or_b64 exec, exec, s[4:5]
+; GISEL-NEXT:  .LBB5_15: ; %Flow7
+; GISEL-NEXT:    s_or_b64 exec, exec, s[8:9]
+; GISEL-NEXT:    v_lshl_add_u32 v0, v10, 23, 1.0
+; GISEL-NEXT:    v_mov_b32_e32 v1, 0x7fffff
+; GISEL-NEXT:    v_and_or_b32 v0, v2, v1, v0
+; GISEL-NEXT:    v_cvt_f16_f32_e32 v2, v0
+; GISEL-NEXT:  .LBB5_16: ; %Flow8
+; GISEL-NEXT:    s_or_b64 exec, exec, s[6:7]
+; GISEL-NEXT:    v_mov_b32_e32 v0, v2
+; GISEL-NEXT:    s_setpc_b64 s[30:31]
+  %cvt = uitofp i128 %x to half
+  ret half %cvt
+}
+
+; FIXME: ExpandLargeFpConvert asserts on bfloat
+; define bfloat @sitofp_i128_to_bf16(i128 %x) {
+;   %cvt = sitofp i128 %x to bfloat
+;   ret bfloat %cvt
+; }
+
+; define bfloat @uitofp_i128_to_bf16(i128 %x) {
+;   %cvt = uitofp i128 %x to bfloat
+;   ret bfloat %cvt
+; }
+;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
+; GCN: {{.*}}