[llvm] [LLVM][NVPTX] Add codegen support for tcgen05.{ld, st} instructions (PR #126740)

Pradeep Kumar via llvm-commits llvm-commits at lists.llvm.org
Tue Feb 11 07:02:55 PST 2025


https://github.com/schwarzschild-radius created https://github.com/llvm/llvm-project/pull/126740

This commit adds support for tcgen05.{ld, st} instructions with lit tests under tcgen05-ld.ll and tcgen05-st.ll and intrinsics documentation under NVPTXUsage.rst

>From 56e2a96146f4b6639e227561830c681050ca5ce7 Mon Sep 17 00:00:00 2001
From: pradeepku <pradeepku at nvidia.com>
Date: Wed, 15 May 2024 04:49:18 +0530
Subject: [PATCH] [LLVM][NVPTX] Add codegen support for tcgen05.{ld, st}
 instructions

This commit adds support for tcgen05.{ld, st} instructions with lit tests under
tcgen05-ld.ll and tcgen05-st.ll and intrinsics documentation under NVPTXUsage.rst
---
 llvm/docs/NVPTXUsage.rst                    |  95 ++
 llvm/include/llvm/IR/Intrinsics.td          |   1 +
 llvm/include/llvm/IR/IntrinsicsNVVM.td      |  65 ++
 llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.cpp | 302 ++++++
 llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.h   |   2 +
 llvm/lib/Target/NVPTX/NVPTXISelLowering.cpp | 416 +++++++++
 llvm/lib/Target/NVPTX/NVPTXIntrinsics.td    |  86 ++
 llvm/test/CodeGen/NVPTX/tcgen05-ld.ll       | 335 +++++++
 llvm/test/CodeGen/NVPTX/tcgen05-st.ll       | 981 ++++++++++++++++++++
 9 files changed, 2283 insertions(+)
 create mode 100644 llvm/test/CodeGen/NVPTX/tcgen05-ld.ll
 create mode 100644 llvm/test/CodeGen/NVPTX/tcgen05-st.ll

diff --git a/llvm/docs/NVPTXUsage.rst b/llvm/docs/NVPTXUsage.rst
index 1680b11433537..d5aed5b3e904e 100644
--- a/llvm/docs/NVPTXUsage.rst
+++ b/llvm/docs/NVPTXUsage.rst
@@ -1175,6 +1175,101 @@ For more information, refer to the PTX ISA
 `<https://docs.nvidia.com/cuda/parallel-thread-execution/#tensorcore-5th-generation-instructions-tcgen05-fence>`_.
 
 
+'``llvm.nvvm.tcgen05.ld.*``'
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Syntax:
+"""""""
+
+.. code-block:: llvm
+
+  declare <n x i32> @llvm.nvvm.tcgen05.ld.<shape>.<num>(ptr addrspace(6) %tmem_addr, i1 %pack)
+
+  declare <n x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.<num>(ptr addrspace(6) %tmem_addr, i64 %offset, i1 %pack)
+
+Overview:
+"""""""""
+
+This group of intrinsics asynchronously loads data from the Tensor Memory at the location specified
+by the 32-bit address operand `tmem_addr` into the destination registers, collectively across all threads
+of the warp.
+
+All the threads in the warp must specify the same value of `tmem_addr`, which must be the base address
+of the collective load operation. Otherwise, the behavior is undefined.
+
+The `shape` qualifier and the `num` qualifier together determine the total dimension of the data ('n') which
+is loaded from the Tensor Memory. The `shape` qualifier indicates the base dimension of data. The `num` qualifier
+indicates the repeat factor on the base dimension resulting in the total dimension of the data that is accessed.
+
+Allowed values for the 'num' are `x1, x2, x4, x8, x16, x32, x64, x128`.
+
+Allowed values for the 'shape' in the first intrinsic are `16x64b, 16x128b, 16x256b, 32x32b`.
+
+Allowed value for the 'shape' in the second intrinsic is `16x32bx2`.
+
+The result of the intrinsic is a vector consisting of one or more 32-bit registers derived from `shape` and
+`num` as shown below.
+
+=========== =========================  ==========  ==========
+ num/shape     16x32bx2/16x64b/32x32b    16x128b    16x256b
+=========== =========================  ==========  ==========
+ x1                 1                      2           4
+ x2                 2                      4           8
+ x4                 4                      8           16
+ x8                 8                      16          32
+ x16                16                     32          64
+ x32                32                     64          128
+ x64                64                     128         NA
+ x128               128                    NA          NA
+=========== =========================  ==========  ==========
+
+The last argument `i1 %pack` is a compile-time constant which, when set, indicates that adjacent columns are packed into a single 32-bit element during the load.
+
+For more information, refer to the
+`PTX ISA <https://docs.nvidia.com/cuda/parallel-thread-execution/#tcgen05-instructions-tcgen05-ld>`__.
+
+
+'``llvm.nvvm.tcgen05.st.*``'
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Syntax:
+"""""""
+
+.. code-block:: llvm
+
+  declare void @llvm.nvvm.tcgen05.st.<shape>.<num>(ptr addrspace(6) %tmem_addr, <n x i32> %args, i1 %unpack)
+
+  declare void @llvm.nvvm.tcgen05.st.16x32bx2.<num>(ptr addrspace(6) %tmem_addr, <n x i32> %args, i64 %offset, i1 %unpack)
+
+Overview:
+"""""""""
+
+This group of intrinsics asynchronously stores data from the source vector into the Tensor Memory at the location
+specified by the 32-bit address operand `tmem_addr` collectively across all threads of the warp.
+
+All the threads in the warp must specify the same value of `tmem_addr`, which must be the base address of the
+collective store operation. Otherwise, the behavior is undefined.
+
+The `shape` qualifier and the `num` qualifier together determine the total dimension of the data ('n') which
+is stored to the Tensor Memory. The `shape` qualifier indicates the base dimension of data. The `num` qualifier
+indicates the repeat factor on the base dimension resulting in the total dimension of the data that is accessed.
+
+Allowed values for the 'num' are `x1, x2, x4, x8, x16, x32, x64, x128`.
+
+Allowed values for the 'shape' in the first intrinsic are `16x64b, 16x128b, 16x256b, 32x32b`.
+
+Allowed value for the 'shape' in the second intrinsic is `16x32bx2`.
+
+`args` argument is a vector consisting of one or more 32-bit registers derived from `shape` and
+`num` as listed in the table listed in the `tcgen05.ld` section.
+
+Each shape supports an `unpack` mode to allow a 32-bit element in the register to be unpacked into two 16-bit elements and stored in adjacent columns. `unpack` mode can be enabled by setting the `%unpack` operand to 1 and can be disabled by setting it to 0.
+
+The last argument `i1 %unpack` is a compile-time constant which, when set, indicates that each 32-bit element in the register is unpacked into two 16-bit elements and stored in adjacent columns.
+
+For more information, refer to the
+`PTX ISA <https://docs.nvidia.com/cuda/parallel-thread-execution/#tcgen05-instructions-tcgen05-st>`__.
+
 Other Intrinsics
 ----------------
 
diff --git a/llvm/include/llvm/IR/Intrinsics.td b/llvm/include/llvm/IR/Intrinsics.td
index 14ecae41ff08f..62239ca705b9e 100644
--- a/llvm/include/llvm/IR/Intrinsics.td
+++ b/llvm/include/llvm/IR/Intrinsics.td
@@ -564,6 +564,7 @@ def llvm_v8i32_ty      : LLVMType<v8i32>;    //  8 x i32
 def llvm_v16i32_ty     : LLVMType<v16i32>;   // 16 x i32
 def llvm_v32i32_ty     : LLVMType<v32i32>;   // 32 x i32
 def llvm_v64i32_ty     : LLVMType<v64i32>;   // 64 x i32
+def llvm_v128i32_ty    : LLVMType<v128i32>;  //128 x i32
 def llvm_v256i32_ty    : LLVMType<v256i32>;  //256 x i32
 
 def llvm_v1i64_ty      : LLVMType<v1i64>;    //  1 x i64
diff --git a/llvm/include/llvm/IR/IntrinsicsNVVM.td b/llvm/include/llvm/IR/IntrinsicsNVVM.td
index 6af1f2a166773..f6bfa575c8e01 100644
--- a/llvm/include/llvm/IR/IntrinsicsNVVM.td
+++ b/llvm/include/llvm/IR/IntrinsicsNVVM.td
@@ -656,6 +656,35 @@ class CP_ASYNC_BULK_TENSOR_REDUCE_INTR<int dim, string mode, string op> {
         ImmArg<ArgIndex<FlagsStartIdx>>];
 }
 
+class NVVM_TCGEN05_LDST_NAME<string Op, string Shape, int Num> {
+  string intr =   "llvm.nvvm.tcgen05." # Op
+                  # "." # Shape
+                  # "." # "x" # !shl(1, Num);
+
+  string record = !subst(".", "_",
+                  !subst("llvm.", "int_", intr));
+}
+
+
+class NVVM_TCGEN05_LDST_ACCESS_SIZE<string Shape, int Num> {
+  int shift = !cond(!eq(Shape, "16x128b"): 1,
+                    !eq(Shape, "16x256b"): 2,
+                    true : 0);
+
+  int veclen = !shl(1, !add(Num, shift));
+
+  int valid = !le(veclen, 128);
+  LLVMType type = !cond(!eq(veclen,   1): llvm_i32_ty,
+                        !eq(veclen,   2): llvm_v2i32_ty,
+                        !eq(veclen,   4): llvm_v4i32_ty,
+                        !eq(veclen,   8): llvm_v8i32_ty,
+                        !eq(veclen,  16): llvm_v16i32_ty,
+                        !eq(veclen,  32): llvm_v32i32_ty,
+                        !eq(veclen,  64): llvm_v64i32_ty,
+                        !eq(veclen, 128): llvm_v128i32_ty,
+                        true : llvm_void_ty);
+}
+
 let TargetPrefix = "nvvm" in {
   def int_nvvm_prmt : ClangBuiltin<"__nvvm_prmt">,
       DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
@@ -5138,4 +5167,40 @@ def int_nvvm_tcgen05_fence_before_thread_sync : Intrinsic<[], [],
 def int_nvvm_tcgen05_fence_after_thread_sync : Intrinsic<[], [],
   [IntrNoMem, IntrHasSideEffects]>;
 
+// Tcgen05 ld
+class NVVM_TCGEN05_LD<string Shape, int Num> :
+        Intrinsic<[NVVM_TCGEN05_LDST_ACCESS_SIZE<Shape, Num>.type],
+                  !listconcat([llvm_tmem_ptr_ty],
+                              !if(!eq(Shape, "16x32bx2"), [llvm_i64_ty], []),
+                              [llvm_i1_ty]),
+                  !listconcat([IntrConvergent, IntrArgMemOnly, NoCapture<ArgIndex<0>>],
+                              !if(!eq(Shape, "16x32bx2"),
+                                [ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>],
+                                [ImmArg<ArgIndex<1>>])),
+                  NVVM_TCGEN05_LDST_NAME<"ld", Shape, Num>.intr>;
+
+// Tcgen05 st
+class NVVM_TCGEN05_ST<string Shape, int Num> :
+        Intrinsic<[],
+                  !listconcat([llvm_tmem_ptr_ty],
+                              !if(!eq(Shape, "16x32bx2"), [llvm_i64_ty], []),
+                              [NVVM_TCGEN05_LDST_ACCESS_SIZE<Shape, Num>.type],
+                              [llvm_i1_ty]),
+                  !listconcat([IntrConvergent, IntrArgMemOnly, NoCapture<ArgIndex<0>>],
+                              !if(!eq(Shape, "16x32bx2"),
+                                [ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<3>>],
+                                [ImmArg<ArgIndex<2>>])),
+                  NVVM_TCGEN05_LDST_NAME<"st", Shape, Num>.intr>;
+
+foreach shape = ["16x64b", "16x128b", "16x256b", "32x32b", "16x32bx2"] in {
+  foreach num = !range(0, 8) in {
+    if NVVM_TCGEN05_LDST_ACCESS_SIZE<shape, num>.valid then {
+      def NVVM_TCGEN05_LDST_NAME<"ld", shape, num>.record:
+            NVVM_TCGEN05_LD<shape, num>;
+      def NVVM_TCGEN05_LDST_NAME<"st", shape, num>.record:
+            NVVM_TCGEN05_ST<shape, num>;
+      }
+  }
+}
+
 } // let TargetPrefix = "nvvm"
diff --git a/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.cpp b/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.cpp
index e96c1758676a1..4a51316c2be66 100644
--- a/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.cpp
+++ b/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.cpp
@@ -203,6 +203,109 @@ void NVPTXDAGToDAGISel::Select(SDNode *N) {
   SelectCode(N);
 }
 
+#define TCGEN05_LD_OPCODE(SHAPE, NUM)                                          \
+  (enablePack ? NVPTX::TCGEN05_LD_##SHAPE##_##NUM##_PACK                       \
+              : NVPTX::TCGEN05_LD_##SHAPE##_##NUM)
+
+static unsigned getTcgen05LdOpcode(unsigned IID, bool enablePack) {
+  switch (IID) {
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x1:
+    return TCGEN05_LD_OPCODE(16x64b, x1);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x2:
+    return TCGEN05_LD_OPCODE(16x64b, x2);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x4:
+    return TCGEN05_LD_OPCODE(16x64b, x4);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x8:
+    return TCGEN05_LD_OPCODE(16x64b, x8);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x16:
+    return TCGEN05_LD_OPCODE(16x64b, x16);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x32:
+    return TCGEN05_LD_OPCODE(16x64b, x32);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x64:
+    return TCGEN05_LD_OPCODE(16x64b, x64);
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x128:
+    return TCGEN05_LD_OPCODE(16x64b, x128);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x1:
+    return TCGEN05_LD_OPCODE(16x128b, x1);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x2:
+    return TCGEN05_LD_OPCODE(16x128b, x2);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x4:
+    return TCGEN05_LD_OPCODE(16x128b, x4);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x8:
+    return TCGEN05_LD_OPCODE(16x128b, x8);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x16:
+    return TCGEN05_LD_OPCODE(16x128b, x16);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x32:
+    return TCGEN05_LD_OPCODE(16x128b, x32);
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x64:
+    return TCGEN05_LD_OPCODE(16x128b, x64);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x1:
+    return TCGEN05_LD_OPCODE(16x256b, x1);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x2:
+    return TCGEN05_LD_OPCODE(16x256b, x2);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x4:
+    return TCGEN05_LD_OPCODE(16x256b, x4);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x8:
+    return TCGEN05_LD_OPCODE(16x256b, x8);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x16:
+    return TCGEN05_LD_OPCODE(16x256b, x16);
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x32:
+    return TCGEN05_LD_OPCODE(16x256b, x32);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x1:
+    return TCGEN05_LD_OPCODE(16x32bx2, x1);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x2:
+    return TCGEN05_LD_OPCODE(16x32bx2, x2);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x4:
+    return TCGEN05_LD_OPCODE(16x32bx2, x4);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x8:
+    return TCGEN05_LD_OPCODE(16x32bx2, x8);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x16:
+    return TCGEN05_LD_OPCODE(16x32bx2, x16);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x32:
+    return TCGEN05_LD_OPCODE(16x32bx2, x32);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x64:
+    return TCGEN05_LD_OPCODE(16x32bx2, x64);
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x128:
+    return TCGEN05_LD_OPCODE(16x32bx2, x128);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x1:
+    return TCGEN05_LD_OPCODE(32x32b, x1);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x2:
+    return TCGEN05_LD_OPCODE(32x32b, x2);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x4:
+    return TCGEN05_LD_OPCODE(32x32b, x4);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x8:
+    return TCGEN05_LD_OPCODE(32x32b, x8);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x16:
+    return TCGEN05_LD_OPCODE(32x32b, x16);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x32:
+    return TCGEN05_LD_OPCODE(32x32b, x32);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x64:
+    return TCGEN05_LD_OPCODE(32x32b, x64);
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x128:
+    return TCGEN05_LD_OPCODE(32x32b, x128);
+  }
+  llvm_unreachable("unhandled tcgen05.ld lowering");
+}
+
+void NVPTXDAGToDAGISel::SelectTcgen05Ld(SDNode *N, bool hasOffset) {
+  SDLoc DL(N);
+  unsigned IID = cast<ConstantSDNode>(N->getOperand(1))->getZExtValue();
+
+  if (hasOffset) {
+    bool enablePack = cast<ConstantSDNode>(N->getOperand(4))->getZExtValue();
+    auto OffsetNode = CurDAG->getTargetConstant(
+        cast<ConstantSDNode>(N->getOperand(3))->getZExtValue(), DL, MVT::i32);
+    ReplaceNode(N, CurDAG->getMachineNode(
+                       getTcgen05LdOpcode(IID, enablePack), DL, N->getVTList(),
+                       {N->getOperand(2), OffsetNode, N->getOperand(0)}));
+  } else {
+    bool enablePack = cast<ConstantSDNode>(N->getOperand(3))->getZExtValue();
+    ReplaceNode(N, CurDAG->getMachineNode(
+                       getTcgen05LdOpcode(IID, enablePack), DL, N->getVTList(),
+                       {N->getOperand(2), N->getOperand(0)}));
+  }
+}
+
 bool NVPTXDAGToDAGISel::tryIntrinsicChain(SDNode *N) {
   unsigned IID = N->getConstantOperandVal(1);
   switch (IID) {
@@ -212,6 +315,51 @@ bool NVPTXDAGToDAGISel::tryIntrinsicChain(SDNode *N) {
   case Intrinsic::nvvm_ldu_global_i:
   case Intrinsic::nvvm_ldu_global_p:
     return tryLDGLDU(N);
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x128: {
+    SelectTcgen05Ld(N);
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x128: {
+    SelectTcgen05Ld(N, /* hasOffset */ true);
+    return true;
+  }
   }
 }
 
@@ -3227,6 +3375,115 @@ void NVPTXDAGToDAGISel::SelectCpAsyncBulkPrefetchL2(SDNode *N) {
   ReplaceNode(N, CurDAG->getMachineNode(Opcode, DL, N->getVTList(), Ops));
 }
 
+#define TCGEN05_ST_OPCODE(SHAPE, NUM)                                          \
+  (enableUnpack ? NVPTX::TCGEN05_ST_##SHAPE##_##NUM##_UNPACK                   \
+                : NVPTX::TCGEN05_ST_##SHAPE##_##NUM)
+
+static unsigned getTcgen05StOpcode(unsigned IID, bool enableUnpack) {
+  switch (IID) {
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x1:
+    return TCGEN05_ST_OPCODE(16x64b, x1);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x2:
+    return TCGEN05_ST_OPCODE(16x64b, x2);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x4:
+    return TCGEN05_ST_OPCODE(16x64b, x4);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x8:
+    return TCGEN05_ST_OPCODE(16x64b, x8);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x16:
+    return TCGEN05_ST_OPCODE(16x64b, x16);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x32:
+    return TCGEN05_ST_OPCODE(16x64b, x32);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x64:
+    return TCGEN05_ST_OPCODE(16x64b, x64);
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x128:
+    return TCGEN05_ST_OPCODE(16x64b, x128);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x1:
+    return TCGEN05_ST_OPCODE(16x128b, x1);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x2:
+    return TCGEN05_ST_OPCODE(16x128b, x2);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x4:
+    return TCGEN05_ST_OPCODE(16x128b, x4);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x8:
+    return TCGEN05_ST_OPCODE(16x128b, x8);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x16:
+    return TCGEN05_ST_OPCODE(16x128b, x16);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x32:
+    return TCGEN05_ST_OPCODE(16x128b, x32);
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x64:
+    return TCGEN05_ST_OPCODE(16x128b, x64);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x1:
+    return TCGEN05_ST_OPCODE(16x256b, x1);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x2:
+    return TCGEN05_ST_OPCODE(16x256b, x2);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x4:
+    return TCGEN05_ST_OPCODE(16x256b, x4);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x8:
+    return TCGEN05_ST_OPCODE(16x256b, x8);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x16:
+    return TCGEN05_ST_OPCODE(16x256b, x16);
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x32:
+    return TCGEN05_ST_OPCODE(16x256b, x32);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x1:
+    return TCGEN05_ST_OPCODE(16x32bx2, x1);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x2:
+    return TCGEN05_ST_OPCODE(16x32bx2, x2);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x4:
+    return TCGEN05_ST_OPCODE(16x32bx2, x4);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x8:
+    return TCGEN05_ST_OPCODE(16x32bx2, x8);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x16:
+    return TCGEN05_ST_OPCODE(16x32bx2, x16);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x32:
+    return TCGEN05_ST_OPCODE(16x32bx2, x32);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x64:
+    return TCGEN05_ST_OPCODE(16x32bx2, x64);
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x128:
+    return TCGEN05_ST_OPCODE(16x32bx2, x128);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x1:
+    return TCGEN05_ST_OPCODE(32x32b, x1);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x2:
+    return TCGEN05_ST_OPCODE(32x32b, x2);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x4:
+    return TCGEN05_ST_OPCODE(32x32b, x4);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x8:
+    return TCGEN05_ST_OPCODE(32x32b, x8);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x16:
+    return TCGEN05_ST_OPCODE(32x32b, x16);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x32:
+    return TCGEN05_ST_OPCODE(32x32b, x32);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x64:
+    return TCGEN05_ST_OPCODE(32x32b, x64);
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x128:
+    return TCGEN05_ST_OPCODE(32x32b, x128);
+  }
+  llvm_unreachable("unhandled tcgen05.st lowering");
+}
+
+void NVPTXDAGToDAGISel::SelectTcgen05St(SDNode *N, bool hasOffset) {
+  SDLoc DL(N);
+  unsigned IID = cast<ConstantSDNode>(N->getOperand(1))->getZExtValue();
+
+  SmallVector<SDValue, 128> Operands = {
+      N->getOperand(2) // taddr
+  };
+
+  if (hasOffset)
+    Operands.push_back(CurDAG->getTargetConstant(
+        cast<ConstantSDNode>(N->getOperand(3))->getZExtValue(), DL,
+        MVT::i32)); // Offset
+
+  for (unsigned I = hasOffset ? 4 : 3; I < (N->getNumOperands() - 1); I++)
+    Operands.push_back(N->getOperand(I));
+
+  bool enableUnpack =
+      cast<ConstantSDNode>(N->getOperand(N->getNumOperands() - 1))
+          ->getZExtValue();
+
+  Operands.push_back(N->getOperand(0)); // Chain
+  ReplaceNode(N, CurDAG->getMachineNode(getTcgen05StOpcode(IID, enableUnpack),
+                                        DL, N->getVTList(), Operands));
+}
+
 bool NVPTXDAGToDAGISel::tryIntrinsicVoid(SDNode *N) {
   unsigned IID = N->getConstantOperandVal(1);
   using TMARedTy = llvm::nvvm::TMAReductionOp;
@@ -3383,5 +3640,50 @@ bool NVPTXDAGToDAGISel::tryIntrinsicVoid(SDNode *N) {
     SelectCpAsyncBulkTensorReduceCommon(N, CastTy(TMARedTy::XOR),
                                         /*IsIm2Col=*/true);
     return true;
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x1:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x128:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x32: {
+    SelectTcgen05St(N);
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x128: {
+    SelectTcgen05St(N,/*  hasOffset */ true);
+    return true;
+  }
   }
 }
diff --git a/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.h b/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.h
index 8dc6bc86c6828..651823caa5223 100644
--- a/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.h
+++ b/llvm/lib/Target/NVPTX/NVPTXISelDAGToDAG.h
@@ -99,6 +99,8 @@ class LLVM_LIBRARY_VISIBILITY NVPTXDAGToDAGISel : public SelectionDAGISel {
   void SelectCpAsyncBulkTensorPrefetchCommon(SDNode *N, bool IsIm2Col = false);
   void SelectCpAsyncBulkTensorReduceCommon(SDNode *N, unsigned RedOp,
                                            bool IsIm2Col = false);
+  void SelectTcgen05Ld(SDNode *N, bool hasOffset = false);
+  void SelectTcgen05St(SDNode *N, bool hasOffset = false);
 
   inline SDValue getI32Imm(unsigned Imm, const SDLoc &DL) {
     return CurDAG->getTargetConstant(Imm, DL, MVT::i32);
diff --git a/llvm/lib/Target/NVPTX/NVPTXISelLowering.cpp b/llvm/lib/Target/NVPTX/NVPTXISelLowering.cpp
index 58ad92a8934a6..5057c849ba5eb 100644
--- a/llvm/lib/Target/NVPTX/NVPTXISelLowering.cpp
+++ b/llvm/lib/Target/NVPTX/NVPTXISelLowering.cpp
@@ -998,6 +998,18 @@ NVPTXTargetLowering::NVPTXTargetLowering(const NVPTXTargetMachine &TM,
   setMinCmpXchgSizeInBits(STI.getMinCmpXchgSizeInBits());
   setMaxAtomicSizeInBitsSupported(64);
   setMaxDivRemBitWidthSupported(64);
+
+  // Custom lowering for tcgen05.ld vector operands
+  setOperationAction(ISD::INTRINSIC_W_CHAIN,
+                     {MVT::v2i32, MVT::v4i32, MVT::v8i32, MVT::v16i32,
+                      MVT::v32i32, MVT::v64i32, MVT::v128i32},
+                     Custom);
+
+  // Custom lowering for tcgen05.st vector operands
+  setOperationAction(ISD::INTRINSIC_VOID,
+                     {MVT::v2i32, MVT::v4i32, MVT::v8i32, MVT::v16i32,
+                      MVT::v32i32, MVT::v64i32, MVT::v128i32},
+                     Custom);
 }
 
 const char *NVPTXTargetLowering::getTargetNodeName(unsigned Opcode) const {
@@ -2635,6 +2647,84 @@ static SDValue LowerVectorArith(SDValue Op, SelectionDAG &DAG) {
   return V;
 }
 
+static SDValue LowerTcgen05St(SDValue Op, SelectionDAG &DAG) {
+  SDNode *N = Op.getNode();
+  SDLoc DL(N);
+  SmallVector<SDValue, 32> Ops;
+
+  // split the vector argument
+  for (size_t I = 0; I < N->getNumOperands(); I++) {
+    SDValue Val = N->getOperand(I);
+    EVT ValVT = Val.getValueType();
+    if (ValVT.isVector()) {
+      EVT EltVT = ValVT.getVectorElementType();
+      for (unsigned J = 0, NElts = ValVT.getVectorNumElements(); J < NElts; J++)
+        Ops.push_back(DAG.getNode(ISD::EXTRACT_VECTOR_ELT, DL, EltVT, Val,
+                                  DAG.getIntPtrConstant(J, DL)));
+    } else
+      Ops.push_back(Val);
+  }
+
+  MemIntrinsicSDNode *MemSD = cast<MemIntrinsicSDNode>(N);
+  SDValue Tcgen05StNode =
+      DAG.getMemIntrinsicNode(ISD::INTRINSIC_VOID, DL, N->getVTList(), Ops,
+                              MemSD->getMemoryVT(), MemSD->getMemOperand());
+
+  return Tcgen05StNode;
+}
+
+static SDValue LowerIntrinsicVoid(SDValue Op, SelectionDAG &DAG) {
+  SDNode *N = Op.getNode();
+  SDValue Intrin = N->getOperand(1);
+  SDLoc DL(N);
+
+  // Get the intrinsic ID
+  unsigned IntrinNo = cast<ConstantSDNode>(Intrin.getNode())->getZExtValue();
+  switch (IntrinNo) {
+  default:
+    break;
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x128:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x1:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x128:
+    return LowerTcgen05St(Op, DAG);
+  }
+  return Op;
+}
+
 SDValue
 NVPTXTargetLowering::LowerOperation(SDValue Op, SelectionDAG &DAG) const {
   switch (Op.getOpcode()) {
@@ -2646,6 +2736,8 @@ NVPTXTargetLowering::LowerOperation(SDValue Op, SelectionDAG &DAG) const {
     return LowerGlobalAddress(Op, DAG);
   case ISD::INTRINSIC_W_CHAIN:
     return Op;
+  case ISD::INTRINSIC_VOID:
+    return LowerIntrinsicVoid(Op, DAG);
   case ISD::BUILD_VECTOR:
     return LowerBUILD_VECTOR(Op, DAG);
   case ISD::BITCAST:
@@ -4245,6 +4337,240 @@ bool NVPTXTargetLowering::getTgtMemIntrinsic(
     Info.flags = MachineMemOperand::MOLoad;
     Info.align = Align(16);
     return true;
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x1:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v1i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x2:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v2i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x4:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v4i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x8:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v8i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x16:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v16i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x32:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v32i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x64:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v64i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x128:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x128:
+  {
+    Info.opc = ISD::INTRINSIC_W_CHAIN;
+    Info.memVT = MVT::v128i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOLoad;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x1:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x1:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x1:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x2:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v2i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x4:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v4i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x8:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v8i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x16:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v16i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x32:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v32i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x64:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v64i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
+
+  case Intrinsic::nvvm_tcgen05_st_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_st_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_st_16x256b_x32:
+  case Intrinsic::nvvm_tcgen05_st_32x32b_x128:
+  case Intrinsic::nvvm_tcgen05_st_16x32bx2_x128:
+  {
+    Info.opc = ISD::INTRINSIC_VOID;
+    Info.memVT = MVT::v128i32;
+    Info.ptrVal = I.getArgOperand(0);
+    Info.offset = 0;
+    Info.flags = MachineMemOperand::MOStore;
+    Info.align.reset();
+    return true;
+  }
   }
   return false;
 }
@@ -5345,6 +5671,57 @@ static void ReplaceLoadVector(SDNode *N, SelectionDAG &DAG,
   Results.push_back(LoadChain);
 }
 
+// Lower vector return type of tcgen05.ld intrinsics
+static void ReplaceTcgen05Ld(SDNode *N, SelectionDAG &DAG,
+                             SmallVectorImpl<SDValue> &Results,
+                             bool hasOffset = false) {
+  SDLoc DL(N);
+  EVT ResVT = N->getValueType(0);
+  if (!ResVT.isVector())
+    return; // already legalized.
+
+  const unsigned NumElts = ResVT.getVectorNumElements();
+
+  // Build the result-type list: one i32 per vector element, plus the chain
+  SmallVector<EVT, 5> ListVTs;
+  for (unsigned i = 0; i < NumElts; ++i)
+    ListVTs.push_back(MVT::i32);
+
+  ListVTs.push_back(N->getValueType(1)); // Chain
+
+  SDVTList ResVTs = DAG.getVTList(ListVTs);
+
+  SmallVector<SDValue, 8> Ops;
+  // Add Chain and Intrinsic ID
+  Ops.push_back(N->getOperand(0)); // Chain
+  Ops.push_back(N->getOperand(1)); // Intrinsic ID
+  Ops.push_back(N->getOperand(2)); // taddr
+
+  if (hasOffset) {
+    Ops.push_back(N->getOperand(3)); // offset
+    Ops.push_back(N->getOperand(4)); // Pack flag
+  } else
+    Ops.push_back(N->getOperand(3)); // Pack flag
+
+  MemIntrinsicSDNode *MemSD = cast<MemIntrinsicSDNode>(N);
+  SDValue NewNode =
+      DAG.getMemIntrinsicNode(ISD::INTRINSIC_W_CHAIN, DL, ResVTs, Ops,
+                              MemSD->getMemoryVT(), MemSD->getMemOperand());
+
+  // Collect the scalar results and rebuild the original vector type
+  SmallVector<SDValue, 4> ScalarRes;
+  for (unsigned i = 0; i < NumElts; ++i) {
+    SDValue Res = NewNode.getValue(i);
+    ScalarRes.push_back(Res);
+  }
+
+  SDValue Chain = NewNode.getValue(NumElts);
+  SDValue BuildVector = DAG.getNode(ISD::BUILD_VECTOR, DL, ResVT, ScalarRes);
+  Results.push_back(BuildVector); // Build Vector
+  Results.push_back(Chain);       // Chain
+  return;
+}
+
 static void ReplaceINTRINSIC_W_CHAIN(SDNode *N, SelectionDAG &DAG,
                                      SmallVectorImpl<SDValue> &Results) {
   SDValue Chain = N->getOperand(0);
@@ -5450,7 +5827,46 @@ static void ReplaceINTRINSIC_W_CHAIN(SDNode *N, SelectionDAG &DAG,
                                     NewLD.getValue(0)));
       Results.push_back(NewLD.getValue(1));
     }
+    return;
   }
+
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x64b_x128:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_32x32b_x128:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x128b_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x1:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x256b_x32:
+    return ReplaceTcgen05Ld(N, DAG, Results);
+
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x2:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x4:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x8:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x16:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x32:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x64:
+  case Intrinsic::nvvm_tcgen05_ld_16x32bx2_x128:
+    return ReplaceTcgen05Ld(N, DAG, Results, /* hasOffset */ true);
   }
 }
 
diff --git a/llvm/lib/Target/NVPTX/NVPTXIntrinsics.td b/llvm/lib/Target/NVPTX/NVPTXIntrinsics.td
index 39dac65d67eb9..716480c64b0b5 100644
--- a/llvm/lib/Target/NVPTX/NVPTXIntrinsics.td
+++ b/llvm/lib/Target/NVPTX/NVPTXIntrinsics.td
@@ -7697,3 +7697,89 @@ def tcgen05_fence_after_thread_sync: NVPTXInst<(outs), (ins),
   Requires<[hasTcgen05Instructions]>;
 
 } // hasSideEffects
+
+// name class for tcgen05.{ld, st}
+class NVVM_TCGEN05_LDST_INST_NAME<string Op, string shape, int lg2Count, bit packOrUnpack> {
+  string name = "TCGEN05_" # Op
+                # "_" # shape
+                # "_x" # !shl(1, lg2Count)
+                # !if(!eq(packOrUnpack, 1), !if(!eq(Op, "LD"), "_PACK", "_UNPACK"), "");
+}
+
+// reginfo class for tcgen05.{ld, st}
+class NVVM_TCGEN05_LDST_REGINFO<int Veclen> {
+  // create a list of types for load/store operands
+  list<NVPTXRegClass> regs = !listsplat(Int32Regs, Veclen);
+  // generate list of regnames for load/store operands
+  list<string> reg_names = !foreach(x, !range(0, Veclen), "r" # x);
+  string regstring = "{{" # !interleave(!foreach(n, !range(0, Veclen), "$r" # n), ", ") # "}}";
+  dag Ins = !dag(ins, regs, reg_names);
+  dag Outs = !dag(outs, regs, reg_names);
+}
+
+//
+// tcgen05.ld.sync.aligned.shape.x{1, 2, 4, 8, 16, 32, 64, 128}[.pack::16b].b32
+//
+
+class NVVM_TCGEN05_LD_INST<string Shape, int Num, bit Pack> :
+               NVPTXInst<(outs), (ins), "?", []>,
+               Requires<[hasTcgen05Instructions]> {
+
+  NVVM_TCGEN05_LDST_REGINFO Info = NVVM_TCGEN05_LDST_REGINFO<
+                                    NVVM_TCGEN05_LDST_ACCESS_SIZE<Shape, Num>.veclen>;
+
+  let InOperandList = !con((ins Int32Regs:$taddr),
+                           !if(!eq(Shape, "16x32bx2"), (ins i64imm:$offset), (ins)));
+  let OutOperandList = Info.Outs;
+  let AsmString = "tcgen05.ld.sync.aligned"
+                  # "." # Shape
+                  # ".x" # !shl(1, Num)
+                  # !if(!eq(Pack, 1), ".pack::16b", "")
+                  # ".b32 "
+                  # Info.regstring # ", "
+                  # "[$taddr]"
+                  # !if(!eq(Shape, "16x32bx2"), ", $offset", "")
+                  # ";";
+}
+
+//
+// tcgen05.st.sync.aligned.shape.x{1, 2, 4, 8, 16, 32, 64, 128}[.unpack::16b].b32
+//
+
+class NVVM_TCGEN05_ST_INST<string Shape, int Num, bit Unpack> :
+               NVPTXInst<(outs), (ins), "?", []>,
+               Requires<[hasTcgen05Instructions]> {
+
+  NVVM_TCGEN05_LDST_REGINFO Info = NVVM_TCGEN05_LDST_REGINFO<
+                                    NVVM_TCGEN05_LDST_ACCESS_SIZE<Shape, Num>.veclen>;
+
+  let InOperandList = !con((ins Int32Regs:$taddr),
+                           !if(!eq(Shape, "16x32bx2"), (ins i64imm:$offset), (ins)),
+                           Info.Ins);
+  let OutOperandList = (outs);
+  let AsmString = "tcgen05.st.sync.aligned"
+                  # "." # Shape
+                  # ".x" # !shl(1, Num)
+                  # !if(!eq(Unpack, 1), ".unpack::16b", "")
+                  # ".b32 [$taddr]"
+                  # !if(!eq(Shape, "16x32bx2"), ", $offset", "")
+                  # ", " # Info.regstring
+                  # ";";
+}
+
+let isConvergent = true in {
+
+foreach shape = ["16x64b", "16x128b", "16x256b", "32x32b", "16x32bx2"] in {
+  foreach num = !range(0, 8) in {
+    foreach packOrUnpack = [false, true] in {
+      if NVVM_TCGEN05_LDST_ACCESS_SIZE<shape, num>.valid then {
+        def NVVM_TCGEN05_LDST_INST_NAME<"LD", shape, num, packOrUnpack>.name :
+              NVVM_TCGEN05_LD_INST<shape, num, packOrUnpack>;
+        def NVVM_TCGEN05_LDST_INST_NAME<"ST", shape, num, packOrUnpack>.name :
+              NVVM_TCGEN05_ST_INST<shape, num, packOrUnpack>;
+      }
+    }
+  }
+}
+
+} // isConvergent
diff --git a/llvm/test/CodeGen/NVPTX/tcgen05-ld.ll b/llvm/test/CodeGen/NVPTX/tcgen05-ld.ll
new file mode 100644
index 0000000000000..83dbcb1bc02b1
--- /dev/null
+++ b/llvm/test/CodeGen/NVPTX/tcgen05-ld.ll
@@ -0,0 +1,335 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
+; RUN: llc < %s -o - -mcpu=sm_100a -march=nvptx64 -mattr=+ptx86 | FileCheck %s
+; RUN: llc < %s -o - -mcpu=sm_101a -march=nvptx64 -mattr=+ptx86 | FileCheck %s
+; RUN: %if ptxas-12.8 %{ llc < %s -march=nvptx64 -mattr=+ptx86 -mcpu=sm_100a | %ptxas-verify -arch=sm_100a %}
+; RUN: %if ptxas-12.8 %{ llc < %s -march=nvptx64 -mattr=+ptx86 -mcpu=sm_101a | %ptxas-verify -arch=sm_101a %}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x64b
+define void @nvvm_tcgen05_ld_16x64b(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x64b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x64b_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x1.b32 {%r2}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x2.b32 {%r3, %r4}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x4.b32 {%r5, %r6, %r7, %r8}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x8.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x16.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x32.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x64.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x128.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.16x64b.x1(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x2(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x4(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x8(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x16(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x32(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x64(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x128(ptr addrspace(6) %taddr, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x64b_pack
+define void @nvvm_tcgen05_ld_16x64b_pack(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x64b_pack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x64b_pack_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x1.pack::16b.b32 {%r2}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x2.pack::16b.b32 {%r3, %r4}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x4.pack::16b.b32 {%r5, %r6, %r7, %r8}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x8.pack::16b.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x16.pack::16b.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x32.pack::16b.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x64.pack::16b.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x64b.x128.pack::16b.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.16x64b.x1(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x2(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x4(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x8(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x16(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x32(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x64(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x64b.x128(ptr addrspace(6) %taddr, i1 1)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x128b
+define void @nvvm_tcgen05_ld_16x128b(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x128b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<256>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x128b_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x1.b32 {%r2, %r3}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x2.b32 {%r4, %r5, %r6, %r7}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x4.b32 {%r8, %r9, %r10, %r11, %r12, %r13, %r14, %r15}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x8.b32 {%r16, %r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x16.b32 {%r32, %r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x32.b32 {%r64, %r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x64.b32 {%r128, %r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x1(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x2(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x4(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x8(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x16(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x32(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x64(ptr addrspace(6) %taddr, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x128b_pack
+define void @nvvm_tcgen05_ld_16x128b_pack(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x128b_pack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<256>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x128b_pack_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x1.pack::16b.b32 {%r2, %r3}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x2.pack::16b.b32 {%r4, %r5, %r6, %r7}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x4.pack::16b.b32 {%r8, %r9, %r10, %r11, %r12, %r13, %r14, %r15}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x8.pack::16b.b32 {%r16, %r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x16.pack::16b.b32 {%r32, %r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x32.pack::16b.b32 {%r64, %r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x128b.x64.pack::16b.b32 {%r128, %r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x1(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x2(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x4(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x8(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x16(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x32(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x128b.x64(ptr addrspace(6) %taddr, i1 1)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x256b
+define void @nvvm_tcgen05_ld_16x256b(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x256b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<254>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x256b_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x1.b32 {%r2, %r3, %r4, %r5}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x2.b32 {%r6, %r7, %r8, %r9, %r10, %r11, %r12, %r13}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x4.b32 {%r14, %r15, %r16, %r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x8.b32 {%r30, %r31, %r32, %r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x16.b32 {%r62, %r63, %r64, %r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x32.b32 {%r126, %r127, %r128, %r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x1(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x2(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x4(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x8(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x16(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x32(ptr addrspace(6) %taddr, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x256b_pack
+; Same 16x256b shape/num coverage as the function above, but with the
+; trailing i1 pack flag set to 1, so every emitted tcgen05.ld carries the
+; .pack::16b qualifier.
+define void @nvvm_tcgen05_ld_16x256b_pack(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x256b_pack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<254>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x256b_pack_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x1.pack::16b.b32 {%r2, %r3, %r4, %r5}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x2.pack::16b.b32 {%r6, %r7, %r8, %r9, %r10, %r11, %r12, %r13}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x4.pack::16b.b32 {%r14, %r15, %r16, %r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x8.pack::16b.b32 {%r30, %r31, %r32, %r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x16.pack::16b.b32 {%r62, %r63, %r64, %r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x256b.x32.pack::16b.b32 {%r126, %r127, %r128, %r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x1(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x2(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x4(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x8(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x16(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x256b.x32(ptr addrspace(6) %taddr, i1 1)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_32x32b
+; Exercises the 32x32b shape for every supported num (x1 through x128) with
+; the pack flag 0, so the plain (unqualified) .b32 form is emitted.
+define void @nvvm_tcgen05_ld_32x32b(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_32x32b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_32x32b_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x1.b32 {%r2}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x2.b32 {%r3, %r4}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x4.b32 {%r5, %r6, %r7, %r8}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x8.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x16.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x32.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x64.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x128.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.32x32b.x1(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x2(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x4(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x8(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x16(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x32(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x64(ptr addrspace(6) %taddr, i1 0)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x128(ptr addrspace(6) %taddr, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_32x32b_pack
+; Packed variant of nvvm_tcgen05_ld_32x32b: identical shape/num coverage with
+; the trailing i1 flag set to 1, so each instruction carries .pack::16b.
+define void @nvvm_tcgen05_ld_32x32b_pack(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_32x32b_pack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_32x32b_pack_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x1.pack::16b.b32 {%r2}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x2.pack::16b.b32 {%r3, %r4}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x4.pack::16b.b32 {%r5, %r6, %r7, %r8}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x8.pack::16b.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x16.pack::16b.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x32.pack::16b.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x64.pack::16b.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.32x32b.x128.pack::16b.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1];
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.32x32b.x1(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x2(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x4(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x8(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x16(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x32(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x64(ptr addrspace(6) %taddr, i1 1)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.32x32b.x128(ptr addrspace(6) %taddr, i1 1)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x32bx2
+; The 16x32bx2 intrinsics additionally take an i64 operand (here the constant
+; 2), which is emitted as the trailing immediate of each instruction.
+define void @nvvm_tcgen05_ld_16x32bx2(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x32bx2(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x32bx2_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x1.b32 {%r2}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x2.b32 {%r3, %r4}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x4.b32 {%r5, %r6, %r7, %r8}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x8.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x16.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x32.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x64.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x128.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1], 2;
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.16x32bx2.x1(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x2(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x4(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x8(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x16(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x32(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x64(ptr addrspace(6) %taddr, i64 2, i1 0)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x128(ptr addrspace(6) %taddr, i64 2, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_ld_16x32bx2_pack
+; Packed variant of nvvm_tcgen05_ld_16x32bx2: same i64 immediate operand,
+; with the trailing i1 flag set to 1 so each instruction carries .pack::16b.
+define void @nvvm_tcgen05_ld_16x32bx2_pack(ptr addrspace(6) %taddr) {
+; CHECK-LABEL: nvvm_tcgen05_ld_16x32bx2_pack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_ld_16x32bx2_pack_param_0];
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x1.pack::16b.b32 {%r2}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x2.pack::16b.b32 {%r3, %r4}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x4.pack::16b.b32 {%r5, %r6, %r7, %r8}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x8.pack::16b.b32 {%r9, %r10, %r11, %r12, %r13, %r14, %r15, %r16}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x16.pack::16b.b32 {%r17, %r18, %r19, %r20, %r21, %r22, %r23, %r24, %r25, %r26, %r27, %r28, %r29, %r30, %r31, %r32}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x32.pack::16b.b32 {%r33, %r34, %r35, %r36, %r37, %r38, %r39, %r40, %r41, %r42, %r43, %r44, %r45, %r46, %r47, %r48, %r49, %r50, %r51, %r52, %r53, %r54, %r55, %r56, %r57, %r58, %r59, %r60, %r61, %r62, %r63, %r64}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x64.pack::16b.b32 {%r65, %r66, %r67, %r68, %r69, %r70, %r71, %r72, %r73, %r74, %r75, %r76, %r77, %r78, %r79, %r80, %r81, %r82, %r83, %r84, %r85, %r86, %r87, %r88, %r89, %r90, %r91, %r92, %r93, %r94, %r95, %r96, %r97, %r98, %r99, %r100, %r101, %r102, %r103, %r104, %r105, %r106, %r107, %r108, %r109, %r110, %r111, %r112, %r113, %r114, %r115, %r116, %r117, %r118, %r119, %r120, %r121, %r122, %r123, %r124, %r125, %r126, %r127, %r128}, [%r1], 2;
+; CHECK-NEXT:    tcgen05.ld.sync.aligned.16x32bx2.x128.pack::16b.b32 {%r129, %r130, %r131, %r132, %r133, %r134, %r135, %r136, %r137, %r138, %r139, %r140, %r141, %r142, %r143, %r144, %r145, %r146, %r147, %r148, %r149, %r150, %r151, %r152, %r153, %r154, %r155, %r156, %r157, %r158, %r159, %r160, %r161, %r162, %r163, %r164, %r165, %r166, %r167, %r168, %r169, %r170, %r171, %r172, %r173, %r174, %r175, %r176, %r177, %r178, %r179, %r180, %r181, %r182, %r183, %r184, %r185, %r186, %r187, %r188, %r189, %r190, %r191, %r192, %r193, %r194, %r195, %r196, %r197, %r198, %r199, %r200, %r201, %r202, %r203, %r204, %r205, %r206, %r207, %r208, %r209, %r210, %r211, %r212, %r213, %r214, %r215, %r216, %r217, %r218, %r219, %r220, %r221, %r222, %r223, %r224, %r225, %r226, %r227, %r228, %r229, %r230, %r231, %r232, %r233, %r234, %r235, %r236, %r237, %r238, %r239, %r240, %r241, %r242, %r243, %r244, %r245, %r246, %r247, %r248, %r249, %r250, %r251, %r252, %r253, %r254, %r255, %r256}, [%r1], 2;
+; CHECK-NEXT:    ret;
+  tail call i32 @llvm.nvvm.tcgen05.ld.16x32bx2.x1(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <2 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x2(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <4 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x4(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <8 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x8(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <16 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x16(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <32 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x32(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <64 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x64(ptr addrspace(6) %taddr, i64 2, i1 1)
+
+  tail call <128 x i32> @llvm.nvvm.tcgen05.ld.16x32bx2.x128(ptr addrspace(6) %taddr, i64 2, i1 1)
+  ret void
+}
diff --git a/llvm/test/CodeGen/NVPTX/tcgen05-st.ll b/llvm/test/CodeGen/NVPTX/tcgen05-st.ll
new file mode 100644
index 0000000000000..c22f795193c7d
--- /dev/null
+++ b/llvm/test/CodeGen/NVPTX/tcgen05-st.ll
@@ -0,0 +1,981 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
+; RUN: llc < %s -o - -mcpu=sm_100a -march=nvptx64 -mattr=+ptx86 | FileCheck %s
+; RUN: llc < %s -o - -mcpu=sm_101a -march=nvptx64 -mattr=+ptx86 | FileCheck %s
+; RUN: %if ptxas-12.8 %{ llc < %s -march=nvptx64 -mcpu=sm_100a -mattr=+ptx86 | %ptxas-verify -arch=sm_100a %}
+; RUN: %if ptxas-12.8 %{ llc < %s -march=nvptx64 -mcpu=sm_101a -mattr=+ptx86 | %ptxas-verify -arch=sm_101a %}
+
+; CHECK-LABEL: nvvm_tcgen05_st_16x64b
+; tcgen05.st counterpart: stores i32 vectors of increasing width (x1..x128)
+; through the addrspace(6) pointer; the trailing i1 0 selects the plain .b32
+; form (no .unpack::16b qualifier). The wide vector arguments are materialized
+; by the 4-wide ld.param.v4 loads checked below.
+define void @nvvm_tcgen05_st_16x64b(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x64b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x64b_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_16x64b_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x1.b32 [%r1], {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_16x64b_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x2.b32 [%r1], {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_16x64b_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x4.b32 [%r1], {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_16x64b_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_16x64b_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x8.b32 [%r1], {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_16x64b_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_16x64b_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_16x64b_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_16x64b_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x16.b32 [%r1], {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_16x64b_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_16x64b_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_16x64b_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_16x64b_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_16x64b_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_16x64b_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_16x64b_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_16x64b_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x32.b32 [%r1], {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_16x64b_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_16x64b_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_16x64b_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_16x64b_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_16x64b_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_16x64b_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_16x64b_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_16x64b_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_16x64b_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_16x64b_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_16x64b_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_16x64b_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_16x64b_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_16x64b_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_16x64b_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_16x64b_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x64.b32 [%r1], {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_16x64b_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_16x64b_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_16x64b_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_16x64b_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_16x64b_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_16x64b_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_16x64b_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_16x64b_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_16x64b_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_16x64b_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_16x64b_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_16x64b_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_16x64b_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_16x64b_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_16x64b_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_16x64b_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_16x64b_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_16x64b_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_16x64b_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_16x64b_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_16x64b_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_16x64b_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_16x64b_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_16x64b_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_16x64b_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_16x64b_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_16x64b_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_16x64b_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_16x64b_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_16x64b_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_16x64b_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_16x64b_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x128.b32 [%r1], {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x1(ptr addrspace(6) %taddr, i32 %stv1, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x2(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x4(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x8(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x16(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x32(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x64(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x128(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 0)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_st_16x64b_unpack
+define void @nvvm_tcgen05_st_16x64b_unpack(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x64b_unpack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x64b_unpack_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_16x64b_unpack_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x1.unpack::16b.b32 [%r1], {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_16x64b_unpack_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x2.unpack::16b.b32 [%r1], {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_16x64b_unpack_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x4.unpack::16b.b32 [%r1], {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_16x64b_unpack_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_16x64b_unpack_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x8.unpack::16b.b32 [%r1], {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_16x64b_unpack_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_16x64b_unpack_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_16x64b_unpack_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_16x64b_unpack_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x16.unpack::16b.b32 [%r1], {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_16x64b_unpack_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_16x64b_unpack_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_16x64b_unpack_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_16x64b_unpack_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_16x64b_unpack_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_16x64b_unpack_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_16x64b_unpack_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_16x64b_unpack_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x32.unpack::16b.b32 [%r1], {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_16x64b_unpack_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_16x64b_unpack_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_16x64b_unpack_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_16x64b_unpack_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_16x64b_unpack_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_16x64b_unpack_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_16x64b_unpack_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_16x64b_unpack_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_16x64b_unpack_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_16x64b_unpack_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_16x64b_unpack_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_16x64b_unpack_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_16x64b_unpack_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_16x64b_unpack_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_16x64b_unpack_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_16x64b_unpack_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x64.unpack::16b.b32 [%r1], {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_16x64b_unpack_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_16x64b_unpack_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_16x64b_unpack_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_16x64b_unpack_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_16x64b_unpack_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_16x64b_unpack_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_16x64b_unpack_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_16x64b_unpack_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_16x64b_unpack_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_16x64b_unpack_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_16x64b_unpack_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_16x64b_unpack_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_16x64b_unpack_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_16x64b_unpack_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_16x64b_unpack_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_16x64b_unpack_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_16x64b_unpack_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_16x64b_unpack_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_16x64b_unpack_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_16x64b_unpack_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_16x64b_unpack_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_16x64b_unpack_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_16x64b_unpack_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_16x64b_unpack_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_16x64b_unpack_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_16x64b_unpack_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_16x64b_unpack_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_16x64b_unpack_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_16x64b_unpack_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_16x64b_unpack_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_16x64b_unpack_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_16x64b_unpack_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x64b.x128.unpack::16b.b32 [%r1], {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x1(ptr addrspace(6) %taddr, i32 %stv1, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x2(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x4(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x8(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x16(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x32(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x64(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x64b.x128(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 1)
+  ret void
+}
+
+; 16x128b shape, pack flag = 0: each llvm.nvvm.tcgen05.st.16x128b.xN intrinsic
+; takes a <2N x i32> payload and must lower to a single
+; tcgen05.st.sync.aligned.16x128b.xN.b32 instruction (no .unpack::16b
+; qualifier, since the i1 pack operand is 0).  The scalar %stv1 parameter is
+; intentionally unused here; the 16x128b shape starts at the <2 x i32> width.
+; CHECK-LABEL: nvvm_tcgen05_st_16x128b
+define void @nvvm_tcgen05_st_16x128b(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x128b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<256>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x128b_param_0];
+; CHECK-NEXT:    ld.param.v2.u32 {%r2, %r3}, [nvvm_tcgen05_st_16x128b_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x1.b32 [%r1], {%r2, %r3};
+; CHECK-NEXT:    ld.param.v4.u32 {%r4, %r5, %r6, %r7}, [nvvm_tcgen05_st_16x128b_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x2.b32 [%r1], {%r4, %r5, %r6, %r7};
+; CHECK-NEXT:    ld.param.v4.u32 {%r8, %r9, %r10, %r11}, [nvvm_tcgen05_st_16x128b_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r12, %r13, %r14, %r15}, [nvvm_tcgen05_st_16x128b_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x4.b32 [%r1], {%r12, %r13, %r14, %r15, %r8, %r9, %r10, %r11};
+; CHECK-NEXT:    ld.param.v4.u32 {%r16, %r17, %r18, %r19}, [nvvm_tcgen05_st_16x128b_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r20, %r21, %r22, %r23}, [nvvm_tcgen05_st_16x128b_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r24, %r25, %r26, %r27}, [nvvm_tcgen05_st_16x128b_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r28, %r29, %r30, %r31}, [nvvm_tcgen05_st_16x128b_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x8.b32 [%r1], {%r28, %r29, %r30, %r31, %r24, %r25, %r26, %r27, %r20, %r21, %r22, %r23, %r16, %r17, %r18, %r19};
+; CHECK-NEXT:    ld.param.v4.u32 {%r32, %r33, %r34, %r35}, [nvvm_tcgen05_st_16x128b_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r36, %r37, %r38, %r39}, [nvvm_tcgen05_st_16x128b_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r40, %r41, %r42, %r43}, [nvvm_tcgen05_st_16x128b_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r44, %r45, %r46, %r47}, [nvvm_tcgen05_st_16x128b_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r48, %r49, %r50, %r51}, [nvvm_tcgen05_st_16x128b_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r52, %r53, %r54, %r55}, [nvvm_tcgen05_st_16x128b_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r56, %r57, %r58, %r59}, [nvvm_tcgen05_st_16x128b_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r60, %r61, %r62, %r63}, [nvvm_tcgen05_st_16x128b_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x16.b32 [%r1], {%r60, %r61, %r62, %r63, %r56, %r57, %r58, %r59, %r52, %r53, %r54, %r55, %r48, %r49, %r50, %r51, %r44, %r45, %r46, %r47, %r40, %r41, %r42, %r43, %r36, %r37, %r38, %r39, %r32, %r33, %r34, %r35};
+; CHECK-NEXT:    ld.param.v4.u32 {%r64, %r65, %r66, %r67}, [nvvm_tcgen05_st_16x128b_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r68, %r69, %r70, %r71}, [nvvm_tcgen05_st_16x128b_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r72, %r73, %r74, %r75}, [nvvm_tcgen05_st_16x128b_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r76, %r77, %r78, %r79}, [nvvm_tcgen05_st_16x128b_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r80, %r81, %r82, %r83}, [nvvm_tcgen05_st_16x128b_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r84, %r85, %r86, %r87}, [nvvm_tcgen05_st_16x128b_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r88, %r89, %r90, %r91}, [nvvm_tcgen05_st_16x128b_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r92, %r93, %r94, %r95}, [nvvm_tcgen05_st_16x128b_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r96, %r97, %r98, %r99}, [nvvm_tcgen05_st_16x128b_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r100, %r101, %r102, %r103}, [nvvm_tcgen05_st_16x128b_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r104, %r105, %r106, %r107}, [nvvm_tcgen05_st_16x128b_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r108, %r109, %r110, %r111}, [nvvm_tcgen05_st_16x128b_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r112, %r113, %r114, %r115}, [nvvm_tcgen05_st_16x128b_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r116, %r117, %r118, %r119}, [nvvm_tcgen05_st_16x128b_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r120, %r121, %r122, %r123}, [nvvm_tcgen05_st_16x128b_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r124, %r125, %r126, %r127}, [nvvm_tcgen05_st_16x128b_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x32.b32 [%r1], {%r124, %r125, %r126, %r127, %r120, %r121, %r122, %r123, %r116, %r117, %r118, %r119, %r112, %r113, %r114, %r115, %r108, %r109, %r110, %r111, %r104, %r105, %r106, %r107, %r100, %r101, %r102, %r103, %r96, %r97, %r98, %r99, %r92, %r93, %r94, %r95, %r88, %r89, %r90, %r91, %r84, %r85, %r86, %r87, %r80, %r81, %r82, %r83, %r76, %r77, %r78, %r79, %r72, %r73, %r74, %r75, %r68, %r69, %r70, %r71, %r64, %r65, %r66, %r67};
+; CHECK-NEXT:    ld.param.v4.u32 {%r128, %r129, %r130, %r131}, [nvvm_tcgen05_st_16x128b_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r132, %r133, %r134, %r135}, [nvvm_tcgen05_st_16x128b_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r136, %r137, %r138, %r139}, [nvvm_tcgen05_st_16x128b_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r140, %r141, %r142, %r143}, [nvvm_tcgen05_st_16x128b_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r144, %r145, %r146, %r147}, [nvvm_tcgen05_st_16x128b_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r148, %r149, %r150, %r151}, [nvvm_tcgen05_st_16x128b_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r152, %r153, %r154, %r155}, [nvvm_tcgen05_st_16x128b_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r156, %r157, %r158, %r159}, [nvvm_tcgen05_st_16x128b_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r160, %r161, %r162, %r163}, [nvvm_tcgen05_st_16x128b_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r164, %r165, %r166, %r167}, [nvvm_tcgen05_st_16x128b_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r168, %r169, %r170, %r171}, [nvvm_tcgen05_st_16x128b_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r172, %r173, %r174, %r175}, [nvvm_tcgen05_st_16x128b_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r176, %r177, %r178, %r179}, [nvvm_tcgen05_st_16x128b_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r180, %r181, %r182, %r183}, [nvvm_tcgen05_st_16x128b_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r184, %r185, %r186, %r187}, [nvvm_tcgen05_st_16x128b_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r188, %r189, %r190, %r191}, [nvvm_tcgen05_st_16x128b_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r192, %r193, %r194, %r195}, [nvvm_tcgen05_st_16x128b_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r196, %r197, %r198, %r199}, [nvvm_tcgen05_st_16x128b_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r200, %r201, %r202, %r203}, [nvvm_tcgen05_st_16x128b_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r204, %r205, %r206, %r207}, [nvvm_tcgen05_st_16x128b_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r208, %r209, %r210, %r211}, [nvvm_tcgen05_st_16x128b_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r212, %r213, %r214, %r215}, [nvvm_tcgen05_st_16x128b_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r216, %r217, %r218, %r219}, [nvvm_tcgen05_st_16x128b_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r220, %r221, %r222, %r223}, [nvvm_tcgen05_st_16x128b_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r224, %r225, %r226, %r227}, [nvvm_tcgen05_st_16x128b_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r228, %r229, %r230, %r231}, [nvvm_tcgen05_st_16x128b_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r232, %r233, %r234, %r235}, [nvvm_tcgen05_st_16x128b_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r236, %r237, %r238, %r239}, [nvvm_tcgen05_st_16x128b_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r240, %r241, %r242, %r243}, [nvvm_tcgen05_st_16x128b_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r244, %r245, %r246, %r247}, [nvvm_tcgen05_st_16x128b_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r248, %r249, %r250, %r251}, [nvvm_tcgen05_st_16x128b_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r252, %r253, %r254, %r255}, [nvvm_tcgen05_st_16x128b_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x64.b32 [%r1], {%r252, %r253, %r254, %r255, %r248, %r249, %r250, %r251, %r244, %r245, %r246, %r247, %r240, %r241, %r242, %r243, %r236, %r237, %r238, %r239, %r232, %r233, %r234, %r235, %r228, %r229, %r230, %r231, %r224, %r225, %r226, %r227, %r220, %r221, %r222, %r223, %r216, %r217, %r218, %r219, %r212, %r213, %r214, %r215, %r208, %r209, %r210, %r211, %r204, %r205, %r206, %r207, %r200, %r201, %r202, %r203, %r196, %r197, %r198, %r199, %r192, %r193, %r194, %r195, %r188, %r189, %r190, %r191, %r184, %r185, %r186, %r187, %r180, %r181, %r182, %r183, %r176, %r177, %r178, %r179, %r172, %r173, %r174, %r175, %r168, %r169, %r170, %r171, %r164, %r165, %r166, %r167, %r160, %r161, %r162, %r163, %r156, %r157, %r158, %r159, %r152, %r153, %r154, %r155, %r148, %r149, %r150, %r151, %r144, %r145, %r146, %r147, %r140, %r141, %r142, %r143, %r136, %r137, %r138, %r139, %r132, %r133, %r134, %r135, %r128, %r129, %r130, %r131};
+; CHECK-NEXT:    ret;
+; One call per .xN variant, doubling the payload width each time (x1 takes
+; <2 x i32> for this 128-bit shape; x64 takes the full <128 x i32>).
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x1(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x2(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x4(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x8(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x16(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x32(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x64(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 0)
+  ret void
+}
+
+; Same 16x128b shape as above, but with the i1 pack operand set to 1: every
+; lowered instruction must carry the .unpack::16b qualifier
+; (tcgen05.st.sync.aligned.16x128b.xN.unpack::16b.b32).  Register usage and
+; operand ordering are otherwise identical to the non-unpack test.
+; CHECK-LABEL: nvvm_tcgen05_st_16x128b_unpack
+define void @nvvm_tcgen05_st_16x128b_unpack(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x128b_unpack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<256>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x128b_unpack_param_0];
+; CHECK-NEXT:    ld.param.v2.u32 {%r2, %r3}, [nvvm_tcgen05_st_16x128b_unpack_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x1.unpack::16b.b32 [%r1], {%r2, %r3};
+; CHECK-NEXT:    ld.param.v4.u32 {%r4, %r5, %r6, %r7}, [nvvm_tcgen05_st_16x128b_unpack_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x2.unpack::16b.b32 [%r1], {%r4, %r5, %r6, %r7};
+; CHECK-NEXT:    ld.param.v4.u32 {%r8, %r9, %r10, %r11}, [nvvm_tcgen05_st_16x128b_unpack_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r12, %r13, %r14, %r15}, [nvvm_tcgen05_st_16x128b_unpack_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x4.unpack::16b.b32 [%r1], {%r12, %r13, %r14, %r15, %r8, %r9, %r10, %r11};
+; CHECK-NEXT:    ld.param.v4.u32 {%r16, %r17, %r18, %r19}, [nvvm_tcgen05_st_16x128b_unpack_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r20, %r21, %r22, %r23}, [nvvm_tcgen05_st_16x128b_unpack_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r24, %r25, %r26, %r27}, [nvvm_tcgen05_st_16x128b_unpack_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r28, %r29, %r30, %r31}, [nvvm_tcgen05_st_16x128b_unpack_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x8.unpack::16b.b32 [%r1], {%r28, %r29, %r30, %r31, %r24, %r25, %r26, %r27, %r20, %r21, %r22, %r23, %r16, %r17, %r18, %r19};
+; CHECK-NEXT:    ld.param.v4.u32 {%r32, %r33, %r34, %r35}, [nvvm_tcgen05_st_16x128b_unpack_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r36, %r37, %r38, %r39}, [nvvm_tcgen05_st_16x128b_unpack_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r40, %r41, %r42, %r43}, [nvvm_tcgen05_st_16x128b_unpack_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r44, %r45, %r46, %r47}, [nvvm_tcgen05_st_16x128b_unpack_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r48, %r49, %r50, %r51}, [nvvm_tcgen05_st_16x128b_unpack_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r52, %r53, %r54, %r55}, [nvvm_tcgen05_st_16x128b_unpack_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r56, %r57, %r58, %r59}, [nvvm_tcgen05_st_16x128b_unpack_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r60, %r61, %r62, %r63}, [nvvm_tcgen05_st_16x128b_unpack_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x16.unpack::16b.b32 [%r1], {%r60, %r61, %r62, %r63, %r56, %r57, %r58, %r59, %r52, %r53, %r54, %r55, %r48, %r49, %r50, %r51, %r44, %r45, %r46, %r47, %r40, %r41, %r42, %r43, %r36, %r37, %r38, %r39, %r32, %r33, %r34, %r35};
+; CHECK-NEXT:    ld.param.v4.u32 {%r64, %r65, %r66, %r67}, [nvvm_tcgen05_st_16x128b_unpack_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r68, %r69, %r70, %r71}, [nvvm_tcgen05_st_16x128b_unpack_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r72, %r73, %r74, %r75}, [nvvm_tcgen05_st_16x128b_unpack_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r76, %r77, %r78, %r79}, [nvvm_tcgen05_st_16x128b_unpack_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r80, %r81, %r82, %r83}, [nvvm_tcgen05_st_16x128b_unpack_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r84, %r85, %r86, %r87}, [nvvm_tcgen05_st_16x128b_unpack_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r88, %r89, %r90, %r91}, [nvvm_tcgen05_st_16x128b_unpack_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r92, %r93, %r94, %r95}, [nvvm_tcgen05_st_16x128b_unpack_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r96, %r97, %r98, %r99}, [nvvm_tcgen05_st_16x128b_unpack_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r100, %r101, %r102, %r103}, [nvvm_tcgen05_st_16x128b_unpack_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r104, %r105, %r106, %r107}, [nvvm_tcgen05_st_16x128b_unpack_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r108, %r109, %r110, %r111}, [nvvm_tcgen05_st_16x128b_unpack_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r112, %r113, %r114, %r115}, [nvvm_tcgen05_st_16x128b_unpack_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r116, %r117, %r118, %r119}, [nvvm_tcgen05_st_16x128b_unpack_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r120, %r121, %r122, %r123}, [nvvm_tcgen05_st_16x128b_unpack_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r124, %r125, %r126, %r127}, [nvvm_tcgen05_st_16x128b_unpack_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x32.unpack::16b.b32 [%r1], {%r124, %r125, %r126, %r127, %r120, %r121, %r122, %r123, %r116, %r117, %r118, %r119, %r112, %r113, %r114, %r115, %r108, %r109, %r110, %r111, %r104, %r105, %r106, %r107, %r100, %r101, %r102, %r103, %r96, %r97, %r98, %r99, %r92, %r93, %r94, %r95, %r88, %r89, %r90, %r91, %r84, %r85, %r86, %r87, %r80, %r81, %r82, %r83, %r76, %r77, %r78, %r79, %r72, %r73, %r74, %r75, %r68, %r69, %r70, %r71, %r64, %r65, %r66, %r67};
+; CHECK-NEXT:    ld.param.v4.u32 {%r128, %r129, %r130, %r131}, [nvvm_tcgen05_st_16x128b_unpack_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r132, %r133, %r134, %r135}, [nvvm_tcgen05_st_16x128b_unpack_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r136, %r137, %r138, %r139}, [nvvm_tcgen05_st_16x128b_unpack_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r140, %r141, %r142, %r143}, [nvvm_tcgen05_st_16x128b_unpack_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r144, %r145, %r146, %r147}, [nvvm_tcgen05_st_16x128b_unpack_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r148, %r149, %r150, %r151}, [nvvm_tcgen05_st_16x128b_unpack_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r152, %r153, %r154, %r155}, [nvvm_tcgen05_st_16x128b_unpack_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r156, %r157, %r158, %r159}, [nvvm_tcgen05_st_16x128b_unpack_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r160, %r161, %r162, %r163}, [nvvm_tcgen05_st_16x128b_unpack_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r164, %r165, %r166, %r167}, [nvvm_tcgen05_st_16x128b_unpack_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r168, %r169, %r170, %r171}, [nvvm_tcgen05_st_16x128b_unpack_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r172, %r173, %r174, %r175}, [nvvm_tcgen05_st_16x128b_unpack_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r176, %r177, %r178, %r179}, [nvvm_tcgen05_st_16x128b_unpack_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r180, %r181, %r182, %r183}, [nvvm_tcgen05_st_16x128b_unpack_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r184, %r185, %r186, %r187}, [nvvm_tcgen05_st_16x128b_unpack_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r188, %r189, %r190, %r191}, [nvvm_tcgen05_st_16x128b_unpack_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r192, %r193, %r194, %r195}, [nvvm_tcgen05_st_16x128b_unpack_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r196, %r197, %r198, %r199}, [nvvm_tcgen05_st_16x128b_unpack_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r200, %r201, %r202, %r203}, [nvvm_tcgen05_st_16x128b_unpack_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r204, %r205, %r206, %r207}, [nvvm_tcgen05_st_16x128b_unpack_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r208, %r209, %r210, %r211}, [nvvm_tcgen05_st_16x128b_unpack_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r212, %r213, %r214, %r215}, [nvvm_tcgen05_st_16x128b_unpack_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r216, %r217, %r218, %r219}, [nvvm_tcgen05_st_16x128b_unpack_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r220, %r221, %r222, %r223}, [nvvm_tcgen05_st_16x128b_unpack_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r224, %r225, %r226, %r227}, [nvvm_tcgen05_st_16x128b_unpack_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r228, %r229, %r230, %r231}, [nvvm_tcgen05_st_16x128b_unpack_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r232, %r233, %r234, %r235}, [nvvm_tcgen05_st_16x128b_unpack_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r236, %r237, %r238, %r239}, [nvvm_tcgen05_st_16x128b_unpack_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r240, %r241, %r242, %r243}, [nvvm_tcgen05_st_16x128b_unpack_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r244, %r245, %r246, %r247}, [nvvm_tcgen05_st_16x128b_unpack_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r248, %r249, %r250, %r251}, [nvvm_tcgen05_st_16x128b_unpack_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r252, %r253, %r254, %r255}, [nvvm_tcgen05_st_16x128b_unpack_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x128b.x64.unpack::16b.b32 [%r1], {%r252, %r253, %r254, %r255, %r248, %r249, %r250, %r251, %r244, %r245, %r246, %r247, %r240, %r241, %r242, %r243, %r236, %r237, %r238, %r239, %r232, %r233, %r234, %r235, %r228, %r229, %r230, %r231, %r224, %r225, %r226, %r227, %r220, %r221, %r222, %r223, %r216, %r217, %r218, %r219, %r212, %r213, %r214, %r215, %r208, %r209, %r210, %r211, %r204, %r205, %r206, %r207, %r200, %r201, %r202, %r203, %r196, %r197, %r198, %r199, %r192, %r193, %r194, %r195, %r188, %r189, %r190, %r191, %r184, %r185, %r186, %r187, %r180, %r181, %r182, %r183, %r176, %r177, %r178, %r179, %r172, %r173, %r174, %r175, %r168, %r169, %r170, %r171, %r164, %r165, %r166, %r167, %r160, %r161, %r162, %r163, %r156, %r157, %r158, %r159, %r152, %r153, %r154, %r155, %r148, %r149, %r150, %r151, %r144, %r145, %r146, %r147, %r140, %r141, %r142, %r143, %r136, %r137, %r138, %r139, %r132, %r133, %r134, %r135, %r128, %r129, %r130, %r131};
+; CHECK-NEXT:    ret;
+; Identical call sequence to nvvm_tcgen05_st_16x128b, differing only in the
+; trailing pack operand (i1 1 here selects the unpack::16b form).
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x1(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x2(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x4(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x8(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x16(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x32(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x128b.x64(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 1)
+  ret void
+}
+
+; CHECK-LABEL: nvvm_tcgen05_st_16x256b
+define void @nvvm_tcgen05_st_16x256b(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x256b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<254>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x256b_param_0];
+; CHECK-NEXT:    ld.param.v4.u32 {%r2, %r3, %r4, %r5}, [nvvm_tcgen05_st_16x256b_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x1.b32 [%r1], {%r2, %r3, %r4, %r5};
+; CHECK-NEXT:    ld.param.v4.u32 {%r6, %r7, %r8, %r9}, [nvvm_tcgen05_st_16x256b_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r10, %r11, %r12, %r13}, [nvvm_tcgen05_st_16x256b_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x2.b32 [%r1], {%r10, %r11, %r12, %r13, %r6, %r7, %r8, %r9};
+; CHECK-NEXT:    ld.param.v4.u32 {%r14, %r15, %r16, %r17}, [nvvm_tcgen05_st_16x256b_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r18, %r19, %r20, %r21}, [nvvm_tcgen05_st_16x256b_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r22, %r23, %r24, %r25}, [nvvm_tcgen05_st_16x256b_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r26, %r27, %r28, %r29}, [nvvm_tcgen05_st_16x256b_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x4.b32 [%r1], {%r26, %r27, %r28, %r29, %r22, %r23, %r24, %r25, %r18, %r19, %r20, %r21, %r14, %r15, %r16, %r17};
+; CHECK-NEXT:    ld.param.v4.u32 {%r30, %r31, %r32, %r33}, [nvvm_tcgen05_st_16x256b_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r34, %r35, %r36, %r37}, [nvvm_tcgen05_st_16x256b_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r38, %r39, %r40, %r41}, [nvvm_tcgen05_st_16x256b_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r42, %r43, %r44, %r45}, [nvvm_tcgen05_st_16x256b_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r46, %r47, %r48, %r49}, [nvvm_tcgen05_st_16x256b_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r50, %r51, %r52, %r53}, [nvvm_tcgen05_st_16x256b_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r54, %r55, %r56, %r57}, [nvvm_tcgen05_st_16x256b_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r58, %r59, %r60, %r61}, [nvvm_tcgen05_st_16x256b_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x8.b32 [%r1], {%r58, %r59, %r60, %r61, %r54, %r55, %r56, %r57, %r50, %r51, %r52, %r53, %r46, %r47, %r48, %r49, %r42, %r43, %r44, %r45, %r38, %r39, %r40, %r41, %r34, %r35, %r36, %r37, %r30, %r31, %r32, %r33};
+; CHECK-NEXT:    ld.param.v4.u32 {%r62, %r63, %r64, %r65}, [nvvm_tcgen05_st_16x256b_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r66, %r67, %r68, %r69}, [nvvm_tcgen05_st_16x256b_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r70, %r71, %r72, %r73}, [nvvm_tcgen05_st_16x256b_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r74, %r75, %r76, %r77}, [nvvm_tcgen05_st_16x256b_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r78, %r79, %r80, %r81}, [nvvm_tcgen05_st_16x256b_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r82, %r83, %r84, %r85}, [nvvm_tcgen05_st_16x256b_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r86, %r87, %r88, %r89}, [nvvm_tcgen05_st_16x256b_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r90, %r91, %r92, %r93}, [nvvm_tcgen05_st_16x256b_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r94, %r95, %r96, %r97}, [nvvm_tcgen05_st_16x256b_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r98, %r99, %r100, %r101}, [nvvm_tcgen05_st_16x256b_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r102, %r103, %r104, %r105}, [nvvm_tcgen05_st_16x256b_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r106, %r107, %r108, %r109}, [nvvm_tcgen05_st_16x256b_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r110, %r111, %r112, %r113}, [nvvm_tcgen05_st_16x256b_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r114, %r115, %r116, %r117}, [nvvm_tcgen05_st_16x256b_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r118, %r119, %r120, %r121}, [nvvm_tcgen05_st_16x256b_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r122, %r123, %r124, %r125}, [nvvm_tcgen05_st_16x256b_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x16.b32 [%r1], {%r122, %r123, %r124, %r125, %r118, %r119, %r120, %r121, %r114, %r115, %r116, %r117, %r110, %r111, %r112, %r113, %r106, %r107, %r108, %r109, %r102, %r103, %r104, %r105, %r98, %r99, %r100, %r101, %r94, %r95, %r96, %r97, %r90, %r91, %r92, %r93, %r86, %r87, %r88, %r89, %r82, %r83, %r84, %r85, %r78, %r79, %r80, %r81, %r74, %r75, %r76, %r77, %r70, %r71, %r72, %r73, %r66, %r67, %r68, %r69, %r62, %r63, %r64, %r65};
+; CHECK-NEXT:    ld.param.v4.u32 {%r126, %r127, %r128, %r129}, [nvvm_tcgen05_st_16x256b_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r130, %r131, %r132, %r133}, [nvvm_tcgen05_st_16x256b_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r134, %r135, %r136, %r137}, [nvvm_tcgen05_st_16x256b_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r138, %r139, %r140, %r141}, [nvvm_tcgen05_st_16x256b_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r142, %r143, %r144, %r145}, [nvvm_tcgen05_st_16x256b_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r146, %r147, %r148, %r149}, [nvvm_tcgen05_st_16x256b_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r150, %r151, %r152, %r153}, [nvvm_tcgen05_st_16x256b_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r154, %r155, %r156, %r157}, [nvvm_tcgen05_st_16x256b_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r158, %r159, %r160, %r161}, [nvvm_tcgen05_st_16x256b_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r162, %r163, %r164, %r165}, [nvvm_tcgen05_st_16x256b_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r166, %r167, %r168, %r169}, [nvvm_tcgen05_st_16x256b_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r170, %r171, %r172, %r173}, [nvvm_tcgen05_st_16x256b_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r174, %r175, %r176, %r177}, [nvvm_tcgen05_st_16x256b_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r178, %r179, %r180, %r181}, [nvvm_tcgen05_st_16x256b_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r182, %r183, %r184, %r185}, [nvvm_tcgen05_st_16x256b_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r186, %r187, %r188, %r189}, [nvvm_tcgen05_st_16x256b_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r190, %r191, %r192, %r193}, [nvvm_tcgen05_st_16x256b_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r194, %r195, %r196, %r197}, [nvvm_tcgen05_st_16x256b_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r198, %r199, %r200, %r201}, [nvvm_tcgen05_st_16x256b_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r202, %r203, %r204, %r205}, [nvvm_tcgen05_st_16x256b_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r206, %r207, %r208, %r209}, [nvvm_tcgen05_st_16x256b_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r210, %r211, %r212, %r213}, [nvvm_tcgen05_st_16x256b_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r214, %r215, %r216, %r217}, [nvvm_tcgen05_st_16x256b_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r218, %r219, %r220, %r221}, [nvvm_tcgen05_st_16x256b_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r222, %r223, %r224, %r225}, [nvvm_tcgen05_st_16x256b_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r226, %r227, %r228, %r229}, [nvvm_tcgen05_st_16x256b_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r230, %r231, %r232, %r233}, [nvvm_tcgen05_st_16x256b_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r234, %r235, %r236, %r237}, [nvvm_tcgen05_st_16x256b_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r238, %r239, %r240, %r241}, [nvvm_tcgen05_st_16x256b_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r242, %r243, %r244, %r245}, [nvvm_tcgen05_st_16x256b_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r246, %r247, %r248, %r249}, [nvvm_tcgen05_st_16x256b_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r250, %r251, %r252, %r253}, [nvvm_tcgen05_st_16x256b_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x32.b32 [%r1], {%r250, %r251, %r252, %r253, %r246, %r247, %r248, %r249, %r242, %r243, %r244, %r245, %r238, %r239, %r240, %r241, %r234, %r235, %r236, %r237, %r230, %r231, %r232, %r233, %r226, %r227, %r228, %r229, %r222, %r223, %r224, %r225, %r218, %r219, %r220, %r221, %r214, %r215, %r216, %r217, %r210, %r211, %r212, %r213, %r206, %r207, %r208, %r209, %r202, %r203, %r204, %r205, %r198, %r199, %r200, %r201, %r194, %r195, %r196, %r197, %r190, %r191, %r192, %r193, %r186, %r187, %r188, %r189, %r182, %r183, %r184, %r185, %r178, %r179, %r180, %r181, %r174, %r175, %r176, %r177, %r170, %r171, %r172, %r173, %r166, %r167, %r168, %r169, %r162, %r163, %r164, %r165, %r158, %r159, %r160, %r161, %r154, %r155, %r156, %r157, %r150, %r151, %r152, %r153, %r146, %r147, %r148, %r149, %r142, %r143, %r144, %r145, %r138, %r139, %r140, %r141, %r134, %r135, %r136, %r137, %r130, %r131, %r132, %r133, %r126, %r127, %r128, %r129};
+; CHECK-NEXT:    ret;
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x1(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x2(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x4(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x8(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x16(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x32(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 0)
+  ret void
+}
+
+; Test the llvm.nvvm.tcgen05.st.16x256b.* intrinsics with the pack flag set
+; (i1 1): each call must lower to a PTX tcgen05.st instruction carrying the
+; ".unpack::16b" qualifier. Covers the x1 through x32 multipliers, whose
+; source operands grow from <4 x i32> up to <128 x i32>.
+; The assertions below are auto-generated (update_llc_test_checks.py); do not
+; edit them by hand.
+; CHECK-LABEL: nvvm_tcgen05_st_16x256b_unpack
+define void @nvvm_tcgen05_st_16x256b_unpack(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x256b_unpack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<254>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x256b_unpack_param_0];
+; CHECK-NEXT:    ld.param.v4.u32 {%r2, %r3, %r4, %r5}, [nvvm_tcgen05_st_16x256b_unpack_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x1.unpack::16b.b32 [%r1], {%r2, %r3, %r4, %r5};
+; CHECK-NEXT:    ld.param.v4.u32 {%r6, %r7, %r8, %r9}, [nvvm_tcgen05_st_16x256b_unpack_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r10, %r11, %r12, %r13}, [nvvm_tcgen05_st_16x256b_unpack_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x2.unpack::16b.b32 [%r1], {%r10, %r11, %r12, %r13, %r6, %r7, %r8, %r9};
+; CHECK-NEXT:    ld.param.v4.u32 {%r14, %r15, %r16, %r17}, [nvvm_tcgen05_st_16x256b_unpack_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r18, %r19, %r20, %r21}, [nvvm_tcgen05_st_16x256b_unpack_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r22, %r23, %r24, %r25}, [nvvm_tcgen05_st_16x256b_unpack_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r26, %r27, %r28, %r29}, [nvvm_tcgen05_st_16x256b_unpack_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x4.unpack::16b.b32 [%r1], {%r26, %r27, %r28, %r29, %r22, %r23, %r24, %r25, %r18, %r19, %r20, %r21, %r14, %r15, %r16, %r17};
+; CHECK-NEXT:    ld.param.v4.u32 {%r30, %r31, %r32, %r33}, [nvvm_tcgen05_st_16x256b_unpack_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r34, %r35, %r36, %r37}, [nvvm_tcgen05_st_16x256b_unpack_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r38, %r39, %r40, %r41}, [nvvm_tcgen05_st_16x256b_unpack_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r42, %r43, %r44, %r45}, [nvvm_tcgen05_st_16x256b_unpack_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r46, %r47, %r48, %r49}, [nvvm_tcgen05_st_16x256b_unpack_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r50, %r51, %r52, %r53}, [nvvm_tcgen05_st_16x256b_unpack_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r54, %r55, %r56, %r57}, [nvvm_tcgen05_st_16x256b_unpack_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r58, %r59, %r60, %r61}, [nvvm_tcgen05_st_16x256b_unpack_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x8.unpack::16b.b32 [%r1], {%r58, %r59, %r60, %r61, %r54, %r55, %r56, %r57, %r50, %r51, %r52, %r53, %r46, %r47, %r48, %r49, %r42, %r43, %r44, %r45, %r38, %r39, %r40, %r41, %r34, %r35, %r36, %r37, %r30, %r31, %r32, %r33};
+; CHECK-NEXT:    ld.param.v4.u32 {%r62, %r63, %r64, %r65}, [nvvm_tcgen05_st_16x256b_unpack_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r66, %r67, %r68, %r69}, [nvvm_tcgen05_st_16x256b_unpack_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r70, %r71, %r72, %r73}, [nvvm_tcgen05_st_16x256b_unpack_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r74, %r75, %r76, %r77}, [nvvm_tcgen05_st_16x256b_unpack_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r78, %r79, %r80, %r81}, [nvvm_tcgen05_st_16x256b_unpack_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r82, %r83, %r84, %r85}, [nvvm_tcgen05_st_16x256b_unpack_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r86, %r87, %r88, %r89}, [nvvm_tcgen05_st_16x256b_unpack_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r90, %r91, %r92, %r93}, [nvvm_tcgen05_st_16x256b_unpack_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r94, %r95, %r96, %r97}, [nvvm_tcgen05_st_16x256b_unpack_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r98, %r99, %r100, %r101}, [nvvm_tcgen05_st_16x256b_unpack_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r102, %r103, %r104, %r105}, [nvvm_tcgen05_st_16x256b_unpack_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r106, %r107, %r108, %r109}, [nvvm_tcgen05_st_16x256b_unpack_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r110, %r111, %r112, %r113}, [nvvm_tcgen05_st_16x256b_unpack_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r114, %r115, %r116, %r117}, [nvvm_tcgen05_st_16x256b_unpack_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r118, %r119, %r120, %r121}, [nvvm_tcgen05_st_16x256b_unpack_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r122, %r123, %r124, %r125}, [nvvm_tcgen05_st_16x256b_unpack_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x16.unpack::16b.b32 [%r1], {%r122, %r123, %r124, %r125, %r118, %r119, %r120, %r121, %r114, %r115, %r116, %r117, %r110, %r111, %r112, %r113, %r106, %r107, %r108, %r109, %r102, %r103, %r104, %r105, %r98, %r99, %r100, %r101, %r94, %r95, %r96, %r97, %r90, %r91, %r92, %r93, %r86, %r87, %r88, %r89, %r82, %r83, %r84, %r85, %r78, %r79, %r80, %r81, %r74, %r75, %r76, %r77, %r70, %r71, %r72, %r73, %r66, %r67, %r68, %r69, %r62, %r63, %r64, %r65};
+; CHECK-NEXT:    ld.param.v4.u32 {%r126, %r127, %r128, %r129}, [nvvm_tcgen05_st_16x256b_unpack_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r130, %r131, %r132, %r133}, [nvvm_tcgen05_st_16x256b_unpack_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r134, %r135, %r136, %r137}, [nvvm_tcgen05_st_16x256b_unpack_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r138, %r139, %r140, %r141}, [nvvm_tcgen05_st_16x256b_unpack_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r142, %r143, %r144, %r145}, [nvvm_tcgen05_st_16x256b_unpack_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r146, %r147, %r148, %r149}, [nvvm_tcgen05_st_16x256b_unpack_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r150, %r151, %r152, %r153}, [nvvm_tcgen05_st_16x256b_unpack_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r154, %r155, %r156, %r157}, [nvvm_tcgen05_st_16x256b_unpack_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r158, %r159, %r160, %r161}, [nvvm_tcgen05_st_16x256b_unpack_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r162, %r163, %r164, %r165}, [nvvm_tcgen05_st_16x256b_unpack_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r166, %r167, %r168, %r169}, [nvvm_tcgen05_st_16x256b_unpack_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r170, %r171, %r172, %r173}, [nvvm_tcgen05_st_16x256b_unpack_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r174, %r175, %r176, %r177}, [nvvm_tcgen05_st_16x256b_unpack_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r178, %r179, %r180, %r181}, [nvvm_tcgen05_st_16x256b_unpack_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r182, %r183, %r184, %r185}, [nvvm_tcgen05_st_16x256b_unpack_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r186, %r187, %r188, %r189}, [nvvm_tcgen05_st_16x256b_unpack_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r190, %r191, %r192, %r193}, [nvvm_tcgen05_st_16x256b_unpack_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r194, %r195, %r196, %r197}, [nvvm_tcgen05_st_16x256b_unpack_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r198, %r199, %r200, %r201}, [nvvm_tcgen05_st_16x256b_unpack_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r202, %r203, %r204, %r205}, [nvvm_tcgen05_st_16x256b_unpack_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r206, %r207, %r208, %r209}, [nvvm_tcgen05_st_16x256b_unpack_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r210, %r211, %r212, %r213}, [nvvm_tcgen05_st_16x256b_unpack_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r214, %r215, %r216, %r217}, [nvvm_tcgen05_st_16x256b_unpack_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r218, %r219, %r220, %r221}, [nvvm_tcgen05_st_16x256b_unpack_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r222, %r223, %r224, %r225}, [nvvm_tcgen05_st_16x256b_unpack_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r226, %r227, %r228, %r229}, [nvvm_tcgen05_st_16x256b_unpack_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r230, %r231, %r232, %r233}, [nvvm_tcgen05_st_16x256b_unpack_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r234, %r235, %r236, %r237}, [nvvm_tcgen05_st_16x256b_unpack_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r238, %r239, %r240, %r241}, [nvvm_tcgen05_st_16x256b_unpack_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r242, %r243, %r244, %r245}, [nvvm_tcgen05_st_16x256b_unpack_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r246, %r247, %r248, %r249}, [nvvm_tcgen05_st_16x256b_unpack_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r250, %r251, %r252, %r253}, [nvvm_tcgen05_st_16x256b_unpack_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x256b.x32.unpack::16b.b32 [%r1], {%r250, %r251, %r252, %r253, %r246, %r247, %r248, %r249, %r242, %r243, %r244, %r245, %r238, %r239, %r240, %r241, %r234, %r235, %r236, %r237, %r230, %r231, %r232, %r233, %r226, %r227, %r228, %r229, %r222, %r223, %r224, %r225, %r218, %r219, %r220, %r221, %r214, %r215, %r216, %r217, %r210, %r211, %r212, %r213, %r206, %r207, %r208, %r209, %r202, %r203, %r204, %r205, %r198, %r199, %r200, %r201, %r194, %r195, %r196, %r197, %r190, %r191, %r192, %r193, %r186, %r187, %r188, %r189, %r182, %r183, %r184, %r185, %r178, %r179, %r180, %r181, %r174, %r175, %r176, %r177, %r170, %r171, %r172, %r173, %r166, %r167, %r168, %r169, %r162, %r163, %r164, %r165, %r158, %r159, %r160, %r161, %r154, %r155, %r156, %r157, %r150, %r151, %r152, %r153, %r146, %r147, %r148, %r149, %r142, %r143, %r144, %r145, %r138, %r139, %r140, %r141, %r134, %r135, %r136, %r137, %r130, %r131, %r132, %r133, %r126, %r127, %r128, %r129};
+; CHECK-NEXT:    ret;
+; The final i1 operand of each intrinsic below is the pack/unpack flag; here
+; it is 1, selecting the ".unpack::16b" form of tcgen05.st.
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x1(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x2(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x4(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x8(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x16(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x256b.x32(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 1)
+  ret void
+}
+
+; Test the llvm.nvvm.tcgen05.st.32x32b.* intrinsics with the pack flag clear
+; (i1 0): each call must lower to a plain tcgen05.st (no ".unpack::16b").
+; The 32x32b shape supports multipliers x1 through x128, so the source
+; operand grows from a scalar i32 up to <128 x i32>.
+; The assertions below are auto-generated (update_llc_test_checks.py); do not
+; edit them by hand.
+; CHECK-LABEL: nvvm_tcgen05_st_32x32b
+define void @nvvm_tcgen05_st_32x32b(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_32x32b(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_32x32b_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_32x32b_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x1.b32 [%r1], {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_32x32b_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x2.b32 [%r1], {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_32x32b_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x4.b32 [%r1], {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_32x32b_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_32x32b_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x8.b32 [%r1], {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_32x32b_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_32x32b_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_32x32b_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_32x32b_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x16.b32 [%r1], {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_32x32b_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_32x32b_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_32x32b_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_32x32b_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_32x32b_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_32x32b_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_32x32b_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_32x32b_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x32.b32 [%r1], {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_32x32b_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_32x32b_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_32x32b_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_32x32b_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_32x32b_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_32x32b_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_32x32b_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_32x32b_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_32x32b_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_32x32b_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_32x32b_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_32x32b_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_32x32b_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_32x32b_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_32x32b_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_32x32b_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x64.b32 [%r1], {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_32x32b_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_32x32b_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_32x32b_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_32x32b_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_32x32b_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_32x32b_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_32x32b_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_32x32b_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_32x32b_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_32x32b_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_32x32b_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_32x32b_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_32x32b_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_32x32b_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_32x32b_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_32x32b_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_32x32b_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_32x32b_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_32x32b_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_32x32b_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_32x32b_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_32x32b_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_32x32b_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_32x32b_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_32x32b_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_32x32b_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_32x32b_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_32x32b_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_32x32b_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_32x32b_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_32x32b_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_32x32b_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x128.b32 [%r1], {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+; The final i1 operand of each intrinsic below is the pack/unpack flag; here
+; it is 0, so the plain (non-unpack) tcgen05.st form is selected.
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x1(ptr addrspace(6) %taddr, i32 %stv1, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x2(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x4(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x8(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x16(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x32(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x64(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x128(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 0)
+  ret void
+}
+
+; Test the llvm.nvvm.tcgen05.st.32x32b.* intrinsics with the pack flag set
+; (i1 1): each call must lower to a tcgen05.st carrying the ".unpack::16b"
+; qualifier. Same multiplier coverage (x1..x128) as the non-unpack variant
+; above, differing only in the flag operand and expected PTX modifier.
+; The assertions below are auto-generated (update_llc_test_checks.py); do not
+; edit them by hand.
+; CHECK-LABEL: nvvm_tcgen05_st_32x32b_unpack
+define void @nvvm_tcgen05_st_32x32b_unpack(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_32x32b_unpack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_32x32b_unpack_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_32x32b_unpack_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x1.unpack::16b.b32 [%r1], {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_32x32b_unpack_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x2.unpack::16b.b32 [%r1], {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_32x32b_unpack_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x4.unpack::16b.b32 [%r1], {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_32x32b_unpack_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_32x32b_unpack_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x8.unpack::16b.b32 [%r1], {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_32x32b_unpack_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_32x32b_unpack_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_32x32b_unpack_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_32x32b_unpack_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x16.unpack::16b.b32 [%r1], {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_32x32b_unpack_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_32x32b_unpack_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_32x32b_unpack_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_32x32b_unpack_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_32x32b_unpack_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_32x32b_unpack_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_32x32b_unpack_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_32x32b_unpack_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x32.unpack::16b.b32 [%r1], {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_32x32b_unpack_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_32x32b_unpack_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_32x32b_unpack_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_32x32b_unpack_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_32x32b_unpack_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_32x32b_unpack_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_32x32b_unpack_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_32x32b_unpack_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_32x32b_unpack_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_32x32b_unpack_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_32x32b_unpack_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_32x32b_unpack_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_32x32b_unpack_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_32x32b_unpack_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_32x32b_unpack_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_32x32b_unpack_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x64.unpack::16b.b32 [%r1], {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_32x32b_unpack_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_32x32b_unpack_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_32x32b_unpack_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_32x32b_unpack_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_32x32b_unpack_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_32x32b_unpack_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_32x32b_unpack_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_32x32b_unpack_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_32x32b_unpack_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_32x32b_unpack_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_32x32b_unpack_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_32x32b_unpack_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_32x32b_unpack_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_32x32b_unpack_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_32x32b_unpack_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_32x32b_unpack_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_32x32b_unpack_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_32x32b_unpack_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_32x32b_unpack_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_32x32b_unpack_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_32x32b_unpack_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_32x32b_unpack_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_32x32b_unpack_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_32x32b_unpack_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_32x32b_unpack_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_32x32b_unpack_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_32x32b_unpack_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_32x32b_unpack_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_32x32b_unpack_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_32x32b_unpack_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_32x32b_unpack_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_32x32b_unpack_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.32x32b.x128.unpack::16b.b32 [%r1], {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+; The final i1 operand of each intrinsic below is the pack/unpack flag; here
+; it is 1, selecting the ".unpack::16b" form of tcgen05.st.
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x1(ptr addrspace(6) %taddr, i32 %stv1, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x2(ptr addrspace(6) %taddr, <2 x i32> %stv2, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x4(ptr addrspace(6) %taddr, <4 x i32> %stv4, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x8(ptr addrspace(6) %taddr, <8 x i32> %stv8, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x16(ptr addrspace(6) %taddr, <16 x i32> %stv16, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x32(ptr addrspace(6) %taddr, <32 x i32> %stv32, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x64(ptr addrspace(6) %taddr, <64 x i32> %stv64, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.32x32b.x128(ptr addrspace(6) %taddr, <128 x i32> %stv128, i1 1)
+  ret void
+}
+
+; Lowering of the llvm.nvvm.tcgen05.st.16x32bx2.* store intrinsics with the
+; pack flag unset (last operand i1 0): each call must lower to a single
+; tcgen05.st.sync.aligned.16x32bx2.xN.b32 instruction (no unpack qualifier),
+; with the i64 2 offset operand emitted as an immediate after the address.
+; CHECK-LABEL: nvvm_tcgen05_st_16x32bx2
+define void @nvvm_tcgen05_st_16x32bx2(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x32bx2(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x32bx2_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_16x32bx2_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x1.b32 [%r1], 2, {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_16x32bx2_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x2.b32 [%r1], 2, {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_16x32bx2_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x4.b32 [%r1], 2, {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_16x32bx2_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_16x32bx2_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x8.b32 [%r1], 2, {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_16x32bx2_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_16x32bx2_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_16x32bx2_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_16x32bx2_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x16.b32 [%r1], 2, {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_16x32bx2_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_16x32bx2_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_16x32bx2_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_16x32bx2_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_16x32bx2_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_16x32bx2_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_16x32bx2_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_16x32bx2_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x32.b32 [%r1], 2, {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_16x32bx2_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_16x32bx2_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_16x32bx2_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_16x32bx2_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_16x32bx2_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_16x32bx2_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_16x32bx2_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_16x32bx2_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_16x32bx2_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_16x32bx2_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_16x32bx2_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_16x32bx2_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_16x32bx2_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_16x32bx2_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_16x32bx2_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_16x32bx2_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x64.b32 [%r1], 2, {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_16x32bx2_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_16x32bx2_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_16x32bx2_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_16x32bx2_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_16x32bx2_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_16x32bx2_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_16x32bx2_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_16x32bx2_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_16x32bx2_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_16x32bx2_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_16x32bx2_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_16x32bx2_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_16x32bx2_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_16x32bx2_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_16x32bx2_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_16x32bx2_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_16x32bx2_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_16x32bx2_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_16x32bx2_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_16x32bx2_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_16x32bx2_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_16x32bx2_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_16x32bx2_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_16x32bx2_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_16x32bx2_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_16x32bx2_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_16x32bx2_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_16x32bx2_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_16x32bx2_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_16x32bx2_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_16x32bx2_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_16x32bx2_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x128.b32 [%r1], 2, {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+; One intrinsic call per supported data width (x1 through x128). The second
+; argument (i64 2) is the immediate offset operand of the 16x32bx2 shape;
+; presumably the second half-lane offset per the PTX ISA -- confirm there.
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x1(ptr addrspace(6) %taddr, i64 2, i32 %stv1, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x2(ptr addrspace(6) %taddr, i64 2, <2 x i32> %stv2, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x4(ptr addrspace(6) %taddr, i64 2, <4 x i32> %stv4, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x8(ptr addrspace(6) %taddr, i64 2, <8 x i32> %stv8, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x16(ptr addrspace(6) %taddr, i64 2, <16 x i32> %stv16, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x32(ptr addrspace(6) %taddr, i64 2, <32 x i32> %stv32, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x64(ptr addrspace(6) %taddr, i64 2, <64 x i32> %stv64, i1 0)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x128(ptr addrspace(6) %taddr, i64 2, <128 x i32> %stv128, i1 0)
+  ret void
+}
+
+; Same as nvvm_tcgen05_st_16x32bx2 above, but with the pack flag set
+; (last operand i1 1): every emitted instruction must carry the
+; .unpack::16b qualifier, i.e. tcgen05.st.sync.aligned.16x32bx2.xN.unpack::16b.b32.
+; CHECK-LABEL: nvvm_tcgen05_st_16x32bx2_unpack
+define void @nvvm_tcgen05_st_16x32bx2_unpack(ptr addrspace(6) %taddr, i32 %stv1, <2 x i32> %stv2, <4 x i32> %stv4, <8 x i32> %stv8, <16 x i32> %stv16, <32 x i32> %stv32, <64 x i32> %stv64, <128 x i32> %stv128) {
+; CHECK-LABEL: nvvm_tcgen05_st_16x32bx2_unpack(
+; CHECK:       {
+; CHECK-NEXT:    .reg .b32 %r<257>;
+; CHECK-EMPTY:
+; CHECK-NEXT:  // %bb.0:
+; CHECK-NEXT:    ld.param.u32 %r1, [nvvm_tcgen05_st_16x32bx2_unpack_param_0];
+; CHECK-NEXT:    ld.param.u32 %r2, [nvvm_tcgen05_st_16x32bx2_unpack_param_1];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x1.unpack::16b.b32 [%r1], 2, {%r2};
+; CHECK-NEXT:    ld.param.v2.u32 {%r3, %r4}, [nvvm_tcgen05_st_16x32bx2_unpack_param_2];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x2.unpack::16b.b32 [%r1], 2, {%r3, %r4};
+; CHECK-NEXT:    ld.param.v4.u32 {%r5, %r6, %r7, %r8}, [nvvm_tcgen05_st_16x32bx2_unpack_param_3];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x4.unpack::16b.b32 [%r1], 2, {%r5, %r6, %r7, %r8};
+; CHECK-NEXT:    ld.param.v4.u32 {%r9, %r10, %r11, %r12}, [nvvm_tcgen05_st_16x32bx2_unpack_param_4+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r13, %r14, %r15, %r16}, [nvvm_tcgen05_st_16x32bx2_unpack_param_4];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x8.unpack::16b.b32 [%r1], 2, {%r13, %r14, %r15, %r16, %r9, %r10, %r11, %r12};
+; CHECK-NEXT:    ld.param.v4.u32 {%r17, %r18, %r19, %r20}, [nvvm_tcgen05_st_16x32bx2_unpack_param_5+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r21, %r22, %r23, %r24}, [nvvm_tcgen05_st_16x32bx2_unpack_param_5+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r25, %r26, %r27, %r28}, [nvvm_tcgen05_st_16x32bx2_unpack_param_5+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r29, %r30, %r31, %r32}, [nvvm_tcgen05_st_16x32bx2_unpack_param_5];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x16.unpack::16b.b32 [%r1], 2, {%r29, %r30, %r31, %r32, %r25, %r26, %r27, %r28, %r21, %r22, %r23, %r24, %r17, %r18, %r19, %r20};
+; CHECK-NEXT:    ld.param.v4.u32 {%r33, %r34, %r35, %r36}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r37, %r38, %r39, %r40}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r41, %r42, %r43, %r44}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r45, %r46, %r47, %r48}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r49, %r50, %r51, %r52}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r53, %r54, %r55, %r56}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r57, %r58, %r59, %r60}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r61, %r62, %r63, %r64}, [nvvm_tcgen05_st_16x32bx2_unpack_param_6];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x32.unpack::16b.b32 [%r1], 2, {%r61, %r62, %r63, %r64, %r57, %r58, %r59, %r60, %r53, %r54, %r55, %r56, %r49, %r50, %r51, %r52, %r45, %r46, %r47, %r48, %r41, %r42, %r43, %r44, %r37, %r38, %r39, %r40, %r33, %r34, %r35, %r36};
+; CHECK-NEXT:    ld.param.v4.u32 {%r65, %r66, %r67, %r68}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r69, %r70, %r71, %r72}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r73, %r74, %r75, %r76}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r77, %r78, %r79, %r80}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r81, %r82, %r83, %r84}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r85, %r86, %r87, %r88}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r89, %r90, %r91, %r92}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r93, %r94, %r95, %r96}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r97, %r98, %r99, %r100}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r101, %r102, %r103, %r104}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r105, %r106, %r107, %r108}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r109, %r110, %r111, %r112}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r113, %r114, %r115, %r116}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r117, %r118, %r119, %r120}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r121, %r122, %r123, %r124}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r125, %r126, %r127, %r128}, [nvvm_tcgen05_st_16x32bx2_unpack_param_7];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x64.unpack::16b.b32 [%r1], 2, {%r125, %r126, %r127, %r128, %r121, %r122, %r123, %r124, %r117, %r118, %r119, %r120, %r113, %r114, %r115, %r116, %r109, %r110, %r111, %r112, %r105, %r106, %r107, %r108, %r101, %r102, %r103, %r104, %r97, %r98, %r99, %r100, %r93, %r94, %r95, %r96, %r89, %r90, %r91, %r92, %r85, %r86, %r87, %r88, %r81, %r82, %r83, %r84, %r77, %r78, %r79, %r80, %r73, %r74, %r75, %r76, %r69, %r70, %r71, %r72, %r65, %r66, %r67, %r68};
+; CHECK-NEXT:    ld.param.v4.u32 {%r129, %r130, %r131, %r132}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+496];
+; CHECK-NEXT:    ld.param.v4.u32 {%r133, %r134, %r135, %r136}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+480];
+; CHECK-NEXT:    ld.param.v4.u32 {%r137, %r138, %r139, %r140}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+464];
+; CHECK-NEXT:    ld.param.v4.u32 {%r141, %r142, %r143, %r144}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+448];
+; CHECK-NEXT:    ld.param.v4.u32 {%r145, %r146, %r147, %r148}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+432];
+; CHECK-NEXT:    ld.param.v4.u32 {%r149, %r150, %r151, %r152}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+416];
+; CHECK-NEXT:    ld.param.v4.u32 {%r153, %r154, %r155, %r156}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+400];
+; CHECK-NEXT:    ld.param.v4.u32 {%r157, %r158, %r159, %r160}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+384];
+; CHECK-NEXT:    ld.param.v4.u32 {%r161, %r162, %r163, %r164}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+368];
+; CHECK-NEXT:    ld.param.v4.u32 {%r165, %r166, %r167, %r168}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+352];
+; CHECK-NEXT:    ld.param.v4.u32 {%r169, %r170, %r171, %r172}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+336];
+; CHECK-NEXT:    ld.param.v4.u32 {%r173, %r174, %r175, %r176}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+320];
+; CHECK-NEXT:    ld.param.v4.u32 {%r177, %r178, %r179, %r180}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+304];
+; CHECK-NEXT:    ld.param.v4.u32 {%r181, %r182, %r183, %r184}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+288];
+; CHECK-NEXT:    ld.param.v4.u32 {%r185, %r186, %r187, %r188}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+272];
+; CHECK-NEXT:    ld.param.v4.u32 {%r189, %r190, %r191, %r192}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+256];
+; CHECK-NEXT:    ld.param.v4.u32 {%r193, %r194, %r195, %r196}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+240];
+; CHECK-NEXT:    ld.param.v4.u32 {%r197, %r198, %r199, %r200}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+224];
+; CHECK-NEXT:    ld.param.v4.u32 {%r201, %r202, %r203, %r204}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+208];
+; CHECK-NEXT:    ld.param.v4.u32 {%r205, %r206, %r207, %r208}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+192];
+; CHECK-NEXT:    ld.param.v4.u32 {%r209, %r210, %r211, %r212}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+176];
+; CHECK-NEXT:    ld.param.v4.u32 {%r213, %r214, %r215, %r216}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+160];
+; CHECK-NEXT:    ld.param.v4.u32 {%r217, %r218, %r219, %r220}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+144];
+; CHECK-NEXT:    ld.param.v4.u32 {%r221, %r222, %r223, %r224}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+128];
+; CHECK-NEXT:    ld.param.v4.u32 {%r225, %r226, %r227, %r228}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+112];
+; CHECK-NEXT:    ld.param.v4.u32 {%r229, %r230, %r231, %r232}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+96];
+; CHECK-NEXT:    ld.param.v4.u32 {%r233, %r234, %r235, %r236}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+80];
+; CHECK-NEXT:    ld.param.v4.u32 {%r237, %r238, %r239, %r240}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+64];
+; CHECK-NEXT:    ld.param.v4.u32 {%r241, %r242, %r243, %r244}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+48];
+; CHECK-NEXT:    ld.param.v4.u32 {%r245, %r246, %r247, %r248}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+32];
+; CHECK-NEXT:    ld.param.v4.u32 {%r249, %r250, %r251, %r252}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8+16];
+; CHECK-NEXT:    ld.param.v4.u32 {%r253, %r254, %r255, %r256}, [nvvm_tcgen05_st_16x32bx2_unpack_param_8];
+; CHECK-NEXT:    tcgen05.st.sync.aligned.16x32bx2.x128.unpack::16b.b32 [%r1], 2, {%r253, %r254, %r255, %r256, %r249, %r250, %r251, %r252, %r245, %r246, %r247, %r248, %r241, %r242, %r243, %r244, %r237, %r238, %r239, %r240, %r233, %r234, %r235, %r236, %r229, %r230, %r231, %r232, %r225, %r226, %r227, %r228, %r221, %r222, %r223, %r224, %r217, %r218, %r219, %r220, %r213, %r214, %r215, %r216, %r209, %r210, %r211, %r212, %r205, %r206, %r207, %r208, %r201, %r202, %r203, %r204, %r197, %r198, %r199, %r200, %r193, %r194, %r195, %r196, %r189, %r190, %r191, %r192, %r185, %r186, %r187, %r188, %r181, %r182, %r183, %r184, %r177, %r178, %r179, %r180, %r173, %r174, %r175, %r176, %r169, %r170, %r171, %r172, %r165, %r166, %r167, %r168, %r161, %r162, %r163, %r164, %r157, %r158, %r159, %r160, %r153, %r154, %r155, %r156, %r149, %r150, %r151, %r152, %r145, %r146, %r147, %r148, %r141, %r142, %r143, %r144, %r137, %r138, %r139, %r140, %r133, %r134, %r135, %r136, %r129, %r130, %r131, %r132};
+; CHECK-NEXT:    ret;
+; Identical call sequence to the non-unpack test; only the pack flag differs
+; (i1 1), which selects the .unpack::16b form of each instruction.
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x1(ptr addrspace(6) %taddr, i64 2, i32 %stv1, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x2(ptr addrspace(6) %taddr, i64 2, <2 x i32> %stv2, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x4(ptr addrspace(6) %taddr, i64 2, <4 x i32> %stv4, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x8(ptr addrspace(6) %taddr, i64 2, <8 x i32> %stv8, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x16(ptr addrspace(6) %taddr, i64 2, <16 x i32> %stv16, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x32(ptr addrspace(6) %taddr, i64 2, <32 x i32> %stv32, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x64(ptr addrspace(6) %taddr, i64 2, <64 x i32> %stv64, i1 1)
+
+  tail call void @llvm.nvvm.tcgen05.st.16x32bx2.x128(ptr addrspace(6) %taddr, i64 2, <128 x i32> %stv128, i1 1)
+  ret void
+}



More information about the llvm-commits mailing list