[libcxx-commits] [libcxx] [libcxx] Implementation of P1831R1 (PR #101439)

Josh Karns via libcxx-commits libcxx-commits at lists.llvm.org
Wed Jul 31 18:29:42 PDT 2024


https://github.com/jkarns275 created https://github.com/llvm/llvm-project/pull/101439

[P1831R1](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p1831r1.html) introduces two key sets of changes:

1. Remove volatile variants of `tuple_size`, `tuple_element`, `variant_size`, and `variant_alternative`.
2. Constrain the volatile overloads of many `atomic<T>` member functions to require that `atomic<T>::is_always_lock_free` is true (see the sketch after this list).
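
To make the user-facing effect concrete, here is a minimal sketch (not part of the patch); `Big` is a made-up type assumed not to be lock-free, and the exact diagnostics depend on the target and standard library:

```cpp
// Minimal illustration of P1831R1's effect on user code (not part of the patch).
#include <atomic>
#include <tuple>

struct Big { char data[256]; };  // assumed too large to be lock-free on typical targets

int main() {
  // (1) The volatile partial specializations are gone, so e.g.
  //     std::tuple_size<volatile std::tuple<int>>::value is no longer well-formed.
  //     The const specialization remains:
  static_assert(std::tuple_size<const std::tuple<int>>::value == 1);

  // (2) Volatile overloads are constrained on is_always_lock_free:
  volatile std::atomic<int> ok{0};
  ok.store(1);  // OK where atomic<int> is always lock-free

  volatile std::atomic<Big> not_ok{};
  // not_ok.store(Big{});  // would no longer satisfy the new requires-clause
  (void)not_ok;  // silence unused-variable warnings
}
```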

In the case of (2), I am wondering whether this should be feature-gated. Currently the restriction is added regardless of `_LIBCPP_STD_VER` - should I gate it to versions >= 20? One possible shape for such a gate is sketched below.
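
Purely as a sketch of what such a gate might look like (the macro name below is invented for illustration and is not an existing libc++ macro; requires-clauses are C++20-only, so some form of gating is probably needed for older language modes anyway):

```cpp
// Hypothetical helper macro, shown only to illustrate the gating question.
#if _LIBCPP_STD_VER >= 20
#  define _LIBCPP_ATOMIC_REQUIRES_LOCK_FREE(_Cond) requires _Cond
#else
#  define _LIBCPP_ATOMIC_REQUIRES_LOCK_FREE(_Cond)
#endif

// Usage inside atomic<_Tp*>, for example:
//   _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
//       _LIBCPP_ATOMIC_REQUIRES_LOCK_FREE(__base::is_always_lock_free) {
//     ...
//   }
```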

I'm also unsure if I'm supposed to update the status page myself.

>From 92261c5c95f503d01465048cd1d8e5e36e5ced14 Mon Sep 17 00:00:00 2001
From: Joshua Karns <jkarns275 at gmail.com>
Date: Wed, 31 Jul 2024 20:42:59 -0400
Subject: [PATCH 1/2] Initial implementation of P1831R1

---
 libcxx/include/__atomic/atomic.h       |  61 ++++++++++---
 libcxx/include/__atomic/atomic_base.h  | 117 ++++++++++++++++++++-----
 libcxx/include/__tuple/tuple_element.h |  10 ---
 libcxx/include/__tuple/tuple_size.h    |  11 ---
 libcxx/include/variant                 |  12 ---
 5 files changed, 146 insertions(+), 65 deletions(-)

diff --git a/libcxx/include/__atomic/atomic.h b/libcxx/include/__atomic/atomic.h
index bcea21f5ce2e1..ceb9ae773940e 100644
--- a/libcxx/include/__atomic/atomic.h
+++ b/libcxx/include/__atomic/atomic.h
@@ -46,7 +46,9 @@ struct atomic : public __atomic_base<_Tp> {
 
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT 
+    requires __base::is_always_lock_free
+  {
     __base::store(__d);
     return __d;
   }
@@ -71,7 +73,9 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
 
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT 
+    requires __base::is_always_lock_free
+  {
     __base::store(__d);
     return __d;
   }
@@ -80,7 +84,9 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return __d;
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 
+    requires __base::is_always_lock_free
+  {
     // __atomic_fetch_add accepts function pointers, guard against them.
     static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
@@ -92,7 +98,9 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 
+    requires __base::is_always_lock_free
+  {
     // __atomic_fetch_add accepts function pointers, guard against them.
     static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
@@ -104,18 +112,47 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT { return fetch_add(1); }
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT { return fetch_sub(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT
+    requires __base::is_always_lock_free 
+  {
+    return fetch_add(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT { return fetch_add(1) + 1; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT { return fetch_sub(1) - 1; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_add(1) + 1;
+  }
+  
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(1) - 1;
+  }
+  
   _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT 
+    requires __base::is_always_lock_free
+  {
+    return fetch_add(__op) + __op;
+  }
+  
   _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(__op) - __op;
+  }
 
   atomic& operator=(const atomic&)          = delete;
   atomic& operator=(const atomic&) volatile = delete;
@@ -267,7 +304,9 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
 
 template <class _Tp>
 _LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
-atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
+atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
+  requires __atomic_base<_Tp>::is_always_lock_free
+{
   std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
 }
 
diff --git a/libcxx/include/__atomic/atomic_base.h b/libcxx/include/__atomic/atomic_base.h
index 93f5c4cff0d1b..d9a9aa341a600 100644
--- a/libcxx/include/__atomic/atomic_base.h
+++ b/libcxx/include/__atomic/atomic_base.h
@@ -43,7 +43,7 @@ struct __atomic_base // false
     return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
+    requires is_always_lock_free _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
     std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
@@ -51,7 +51,7 @@ struct __atomic_base // false
     std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
-      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
+    requires is_always_lock_free _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
     return std::__cxx_atomic_load(std::addressof(__a_), __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
@@ -60,7 +60,8 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
   _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
-  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires is_always_lock_free {
     return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
@@ -68,7 +69,7 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
-      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    requires is_always_lock_free _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -77,7 +78,7 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
-      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    requires is_always_lock_free _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -85,7 +86,8 @@ struct __atomic_base // false
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool
-  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires is_always_lock_free {
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -93,7 +95,8 @@ struct __atomic_base // false
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
-  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires is_always_lock_free {
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -141,55 +144,127 @@ struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
 
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
 
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
     return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
   }
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
     return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
   }
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
     return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
   _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT
+  requires __base::is_always_lock_free
+  {
+    return fetch_add(_Tp(1));
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(_Tp(1));
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_add(_Tp(1)) + _Tp(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(_Tp(1)) - _Tp(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_add(__op) + __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_sub(__op) - __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_and(__op) & __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_or(__op) | __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT
+    requires __base::is_always_lock_free
+  {
+    return fetch_xor(__op) ^ __op;
+  }
 };
 
 // Here we need _IsIntegral because the default template argument is not enough
diff --git a/libcxx/include/__tuple/tuple_element.h b/libcxx/include/__tuple/tuple_element.h
index 9127c47dc8f1a..bcffc540eaf02 100644
--- a/libcxx/include/__tuple/tuple_element.h
+++ b/libcxx/include/__tuple/tuple_element.h
@@ -28,16 +28,6 @@ struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, const _Tp> {
   typedef _LIBCPP_NODEBUG const typename tuple_element<_Ip, _Tp>::type type;
 };
 
-template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, volatile _Tp> {
-  typedef _LIBCPP_NODEBUG volatile typename tuple_element<_Ip, _Tp>::type type;
-};
-
-template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, const volatile _Tp> {
-  typedef _LIBCPP_NODEBUG const volatile typename tuple_element<_Ip, _Tp>::type type;
-};
-
 #ifndef _LIBCPP_CXX03_LANG
 
 template <size_t _Ip, class... _Types>
diff --git a/libcxx/include/__tuple/tuple_size.h b/libcxx/include/__tuple/tuple_size.h
index 18a17fd4d5878..8dc6bfdb828d8 100644
--- a/libcxx/include/__tuple/tuple_size.h
+++ b/libcxx/include/__tuple/tuple_size.h
@@ -35,17 +35,6 @@ struct _LIBCPP_TEMPLATE_VIS tuple_size<__enable_if_tuple_size_imp< const _Tp,
                                                                    integral_constant<size_t, sizeof(tuple_size<_Tp>)>>>
     : public integral_constant<size_t, tuple_size<_Tp>::value> {};
 
-template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS tuple_size<__enable_if_tuple_size_imp< volatile _Tp,
-                                                                   __enable_if_t<!is_const<_Tp>::value>,
-                                                                   integral_constant<size_t, sizeof(tuple_size<_Tp>)>>>
-    : public integral_constant<size_t, tuple_size<_Tp>::value> {};
-
-template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS
-tuple_size<__enable_if_tuple_size_imp<const volatile _Tp, integral_constant<size_t, sizeof(tuple_size<_Tp>)>>>
-    : public integral_constant<size_t, tuple_size<_Tp>::value> {};
-
 #else
 template <class _Tp>
 struct _LIBCPP_TEMPLATE_VIS tuple_size<const _Tp> : public tuple_size<_Tp> {};
diff --git a/libcxx/include/variant b/libcxx/include/variant
index 5f2d03b7227b8..94767e482bcca 100644
--- a/libcxx/include/variant
+++ b/libcxx/include/variant
@@ -317,12 +317,6 @@ inline constexpr size_t variant_size_v = variant_size<_Tp>::value;
 template <class _Tp>
 struct _LIBCPP_TEMPLATE_VIS variant_size<const _Tp> : variant_size<_Tp> {};
 
-template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_size<volatile _Tp> : variant_size<_Tp> {};
-
-template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_size<const volatile _Tp> : variant_size<_Tp> {};
-
 template <class... _Types>
 struct _LIBCPP_TEMPLATE_VIS variant_size<variant<_Types...>> : integral_constant<size_t, sizeof...(_Types)> {};
 
@@ -335,12 +329,6 @@ using variant_alternative_t = typename variant_alternative<_Ip, _Tp>::type;
 template <size_t _Ip, class _Tp>
 struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, const _Tp> : add_const<variant_alternative_t<_Ip, _Tp>> {};
 
-template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, volatile _Tp> : add_volatile<variant_alternative_t<_Ip, _Tp>> {};
-
-template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, const volatile _Tp> : add_cv<variant_alternative_t<_Ip, _Tp>> {};
-
 template <size_t _Ip, class... _Types>
 struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, variant<_Types...>> {
   static_assert(_Ip < sizeof...(_Types), "Index out of bounds in std::variant_alternative<>");

>From c7f73947eca6d144e4edc2c0d2f95c2c0f2e312b Mon Sep 17 00:00:00 2001
From: Joshua Karns <jkarns275 at gmail.com>
Date: Wed, 31 Jul 2024 20:55:41 -0400
Subject: [PATCH 2/2] ran clang-format

---
 libcxx/include/__atomic/atomic.h      | 18 +++++++++---------
 libcxx/include/__atomic/atomic_base.h | 23 +++++++++++++++--------
 2 files changed, 24 insertions(+), 17 deletions(-)

diff --git a/libcxx/include/__atomic/atomic.h b/libcxx/include/__atomic/atomic.h
index ceb9ae773940e..74c7d49c65055 100644
--- a/libcxx/include/__atomic/atomic.h
+++ b/libcxx/include/__atomic/atomic.h
@@ -46,7 +46,7 @@ struct atomic : public __atomic_base<_Tp> {
 
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT 
+  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     __base::store(__d);
@@ -73,7 +73,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
 
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT 
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     __base::store(__d);
@@ -84,7 +84,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return __d;
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 
+  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     // __atomic_fetch_add accepts function pointers, guard against them.
@@ -98,7 +98,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 
+  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     // __atomic_fetch_add accepts function pointers, guard against them.
@@ -114,7 +114,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
 
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT
-    requires __base::is_always_lock_free 
+    requires __base::is_always_lock_free
   {
     return fetch_add(1);
   }
@@ -132,21 +132,21 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
   {
     return fetch_add(1) + 1;
   }
-  
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     return fetch_sub(1) - 1;
   }
-  
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT 
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT
     requires __base::is_always_lock_free
   {
     return fetch_add(__op) + __op;
   }
-  
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
   _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT
     requires __base::is_always_lock_free
diff --git a/libcxx/include/__atomic/atomic_base.h b/libcxx/include/__atomic/atomic_base.h
index d9a9aa341a600..35a6fb00dbc3e 100644
--- a/libcxx/include/__atomic/atomic_base.h
+++ b/libcxx/include/__atomic/atomic_base.h
@@ -43,7 +43,8 @@ struct __atomic_base // false
     return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-    requires is_always_lock_free _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
+    requires is_always_lock_free
+  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
     std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
@@ -51,7 +52,8 @@ struct __atomic_base // false
     std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
-    requires is_always_lock_free _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
+    requires is_always_lock_free
+  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
     return std::__cxx_atomic_load(std::addressof(__a_), __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
@@ -61,7 +63,8 @@ struct __atomic_base // false
   _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
   _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
   _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-    requires is_always_lock_free {
+    requires is_always_lock_free
+  {
     return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
@@ -69,7 +72,8 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
-    requires is_always_lock_free _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    requires is_always_lock_free
+  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -78,7 +82,8 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
-    requires is_always_lock_free _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    requires is_always_lock_free
+  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -87,7 +92,8 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-    requires is_always_lock_free {
+    requires is_always_lock_free
+  {
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -96,7 +102,8 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
-    requires is_always_lock_free {
+    requires is_always_lock_free
+  {
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -197,7 +204,7 @@ struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
   _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
 
   _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT
-  requires __base::is_always_lock_free
+    requires __base::is_always_lock_free
   {
     return fetch_add(_Tp(1));
   }


