[libcxx-commits] [libcxx] [libcxx][P1831R1] Deprecating volatile: library (PR #101439)

Josh Karns via libcxx-commits libcxx-commits at lists.llvm.org
Tue Aug 6 13:07:18 PDT 2024


https://github.com/jkarns275 updated https://github.com/llvm/llvm-project/pull/101439

>From f953761c292d77ae51799f8032163f6fc1bc64df Mon Sep 17 00:00:00 2001
From: Joshua Karns <jkarns275 at gmail.com>
Date: Wed, 31 Jul 2024 20:42:59 -0400
Subject: [PATCH] Implement P1831R1: deprecate volatile specializations of
 tuple  and variant helper methods, and deprecate many volatile atomic methods
 when that atomic type is not guaranteed to always be lock-free.

Closes #100038
---
 libcxx/include/__atomic/atomic.h              |  67 ++++---
 libcxx/include/__atomic/atomic_base.h         | 116 ++++++++++-
 libcxx/include/__config                       |  15 ++
 libcxx/include/__tuple/tuple_element.h        |   2 +
 libcxx/include/__tuple/tuple_size.h           |   8 +-
 libcxx/include/atomic                         | 188 ++++++++++++------
 libcxx/include/tuple                          |  13 +-
 libcxx/include/variant                        |  28 ++-
 ...tile_require_lock_free_in_cxx20.verify.cpp |  87 ++++++++
 ...ple_element_deprecated_in_cxx20.verify.cpp |  21 ++
 ..._tuple_size_deprecated_in_cxx20.verify.cpp |  21 ++
 .../volatile_deprecated.verify.cpp            |  31 +++
 12 files changed, 487 insertions(+), 110 deletions(-)
 create mode 100644 libcxx/test/libcxx/atomics/atomics.types.operations/atomic_volatile_require_lock_free_in_cxx20.verify.cpp
 create mode 100644 libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_element_deprecated_in_cxx20.verify.cpp
 create mode 100644 libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_size_deprecated_in_cxx20.verify.cpp
 create mode 100644 libcxx/test/libcxx/utilities/variant/variant.variant/variant.helper/volatile_deprecated.verify.cpp

diff --git a/libcxx/include/__atomic/atomic.h b/libcxx/include/__atomic/atomic.h
index bcea21f5ce2e1..bd1a22ecdbc98 100644
--- a/libcxx/include/__atomic/atomic.h
+++ b/libcxx/include/__atomic/atomic.h
@@ -47,6 +47,7 @@ struct atomic : public __atomic_base<_Tp> {
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
 
   _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     __base::store(__d);
     return __d;
   }
@@ -72,6 +73,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
 
   _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
     __base::store(__d);
     return __d;
   }
@@ -81,6 +83,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
   }
 
   _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
     // __atomic_fetch_add accepts function pointers, guard against them.
     static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
@@ -93,6 +96,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
   }
 
   _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
     // __atomic_fetch_add accepts function pointers, guard against them.
     static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
@@ -104,18 +108,41 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT { return fetch_add(1); }
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT { return fetch_sub(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_add(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT { return fetch_add(1) + 1; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_sub(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT { return fetch_sub(1) - 1; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_add(1) + 1;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_sub(1) - 1;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_add(__op) + __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp*, __base::is_always_lock_free);
+    return fetch_sub(__op) - __op;
+  }
 
   atomic& operator=(const atomic&)          = delete;
   atomic& operator=(const atomic&) volatile = delete;
@@ -201,9 +228,8 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
   atomic& operator=(const atomic&)          = delete;
   atomic& operator=(const atomic&) volatile = delete;
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile noexcept
-    requires __base::is_always_lock_free
-  {
+  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile noexcept {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     __base::store(__d);
     return __d;
   }
@@ -212,9 +238,8 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
     return __d;
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
-    requires __base::is_always_lock_free
-  {
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return __fetch_add(*this, __op, __m);
   }
 
@@ -222,9 +247,8 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
     return __fetch_add(*this, __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
-    requires __base::is_always_lock_free
-  {
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return __fetch_sub(*this, __op, __m);
   }
 
@@ -232,17 +256,15 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
     return __fetch_sub(*this, __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile noexcept
-    requires __base::is_always_lock_free
-  {
+  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile noexcept {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return fetch_add(__op) + __op;
   }
 
   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) noexcept { return fetch_add(__op) + __op; }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile noexcept
-    requires __base::is_always_lock_free
-  {
+  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile noexcept {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return fetch_sub(__op) - __op;
   }
 
@@ -272,8 +294,7 @@ atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NO
 }
 
 template <class _Tp>
-_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
-atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI void atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
   std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
 }
 
diff --git a/libcxx/include/__atomic/atomic_base.h b/libcxx/include/__atomic/atomic_base.h
index 93f5c4cff0d1b..57f3b8b574f2b 100644
--- a/libcxx/include/__atomic/atomic_base.h
+++ b/libcxx/include/__atomic/atomic_base.h
@@ -27,6 +27,34 @@
 
 _LIBCPP_BEGIN_NAMESPACE_STD
 
+#if _LIBCPP_STD_VER >= 20
+template <class _Tp, bool __lock_free>
+inline constexpr bool __deprecated_if_not_always_lock_free = true;
+
+template <class _Tp>
+[[deprecated("volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false")]]
+inline constexpr bool __deprecated_if_not_always_lock_free<_Tp, false> = true;
+
+// Many volatile overloads of atomic<T> methods have a requirement to
+// guarantee atomic<T>::is_always_lock_free is true in C++20.
+// To make this a non-breaking change, this macro is used to emit a warning
+// when atomic<T>::is_always_lock_free is false without having to duplicate
+// the method. We could do:
+//
+// _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT 
+//   requires is_always_lock_free { ... }
+//
+// [[deprecated(...)]] _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+//   requires !is_always_lock_free { ... }
+//
+// But this creates a lot of unnecessary duplicate code.
+#  define _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __is_always_lock_free)                                          \
+    static_assert(__deprecated_if_not_always_lock_free<_Tp, __is_always_lock_free>)
+#else
+#  define _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __is_always_lock_free)                                          \
+    {}
+#endif
+
 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
 struct __atomic_base // false
 {
@@ -44,6 +72,7 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
       _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
@@ -52,15 +81,20 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
       _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_load(std::addressof(__a_), __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
       _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
     return std::__cxx_atomic_load(std::addressof(__a_), __m);
   }
-  _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
+  _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
+    return load();
+  }
   _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
   _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
   }
   _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
@@ -69,6 +103,7 @@ struct __atomic_base // false
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
       _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -78,6 +113,7 @@ struct __atomic_base // false
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
       _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
   }
   _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
@@ -86,6 +122,7 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -94,6 +131,7 @@ struct __atomic_base // false
   }
   _LIBCPP_HIDE_FROM_ABI bool
   compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, is_always_lock_free);
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
   _LIBCPP_HIDE_FROM_ABI bool
@@ -142,54 +180,112 @@ struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
 
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
     return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
   }
+
   _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
     return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
   }
 
-  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
   _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_add(_Tp(1));
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_sub(_Tp(1));
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_add(_Tp(1)) + _Tp(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_sub(_Tp(1)) - _Tp(1);
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_add(__op) + __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_sub(__op) - __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_and(__op) & __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
-  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_or(__op) | __op;
+  }
+
   _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT {
+    _LIBCPP_DEPRECATED_NOT_ALWAYS_LOCK_FREE(_Tp, __base::is_always_lock_free);
+    return fetch_xor(__op) ^ __op;
+  }
 };
 
 // Here we need _IsIntegral because the default template argument is not enough
diff --git a/libcxx/include/__config b/libcxx/include/__config
index 392053a64a8dc..705b0a4aee49d 100644
--- a/libcxx/include/__config
+++ b/libcxx/include/__config
@@ -741,6 +741,21 @@ typedef __char32_t char32_t;
 #    define _LIBCPP_DEPRECATED_WITH_CHAR8_T
 #  endif
 
+// P1831R1 deprecated many uses of volatile, but the way attributes work with template specializations requires this
+// work-around to always raise warnings in cases where templates are specialized for volatile variants of STL types.
+#  if _LIBCPP_STD_VER >= 20
+template <class _Tp, bool __cxx20 = _LIBCPP_STD_VER >= 20>
+_LIBCPP_HIDE_FROM_ABI constexpr bool __volatile_deprecated_since_cxx20_warning = true;
+template <class _Tp>
+_LIBCPP_DEPRECATED_IN_CXX20
+_LIBCPP_HIDE_FROM_ABI constexpr bool __volatile_deprecated_since_cxx20_warning<_Tp, true> = true;
+#    define _LIBCPP_VOLATILE_DEPRECATED_WARNING                                                                        \
+      static_assert(__volatile_deprecated_since_cxx20_warning<volatile _Tp>)
+#  else
+// The macro is used at class scope (e.g. in tuple_element), so it must expand to a declaration, not a statement.
+#    define _LIBCPP_VOLATILE_DEPRECATED_WARNING static_assert(true)
+#  endif
+
 // Macros to enter and leave a state where deprecation warnings are suppressed.
 #  if defined(_LIBCPP_COMPILER_CLANG_BASED) || defined(_LIBCPP_COMPILER_GCC)
 #    define _LIBCPP_SUPPRESS_DEPRECATED_PUSH                                                                           \
diff --git a/libcxx/include/__tuple/tuple_element.h b/libcxx/include/__tuple/tuple_element.h
index 9127c47dc8f1a..54434ea2edcb7 100644
--- a/libcxx/include/__tuple/tuple_element.h
+++ b/libcxx/include/__tuple/tuple_element.h
@@ -30,11 +30,13 @@ struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, const _Tp> {
 
 template <size_t _Ip, class _Tp>
 struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, volatile _Tp> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
   typedef _LIBCPP_NODEBUG volatile typename tuple_element<_Ip, _Tp>::type type;
 };
 
 template <size_t _Ip, class _Tp>
 struct _LIBCPP_TEMPLATE_VIS tuple_element<_Ip, const volatile _Tp> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
   typedef _LIBCPP_NODEBUG const volatile typename tuple_element<_Ip, _Tp>::type type;
 };
 
diff --git a/libcxx/include/__tuple/tuple_size.h b/libcxx/include/__tuple/tuple_size.h
index 18a17fd4d5878..949aed1e42e29 100644
--- a/libcxx/include/__tuple/tuple_size.h
+++ b/libcxx/include/__tuple/tuple_size.h
@@ -39,12 +39,16 @@ template <class _Tp>
 struct _LIBCPP_TEMPLATE_VIS tuple_size<__enable_if_tuple_size_imp< volatile _Tp,
                                                                    __enable_if_t<!is_const<_Tp>::value>,
                                                                    integral_constant<size_t, sizeof(tuple_size<_Tp>)>>>
-    : public integral_constant<size_t, tuple_size<_Tp>::value> {};
+    : public integral_constant<size_t, tuple_size<_Tp>::value> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 template <class _Tp>
 struct _LIBCPP_TEMPLATE_VIS
 tuple_size<__enable_if_tuple_size_imp<const volatile _Tp, integral_constant<size_t, sizeof(tuple_size<_Tp>)>>>
-    : public integral_constant<size_t, tuple_size<_Tp>::value> {};
+    : public integral_constant<size_t, tuple_size<_Tp>::value> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 #else
 template <class _Tp>
diff --git a/libcxx/include/atomic b/libcxx/include/atomic
index 772ac998615a9..8c9e21273fd20 100644
--- a/libcxx/include/atomic
+++ b/libcxx/include/atomic
@@ -74,30 +74,39 @@ struct atomic
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
 
-    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    T load(memory_order m = memory_order_seq_cst) const volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     T load(memory_order m = memory_order_seq_cst) const noexcept;
-    operator T() const volatile noexcept;
+    operator T() const volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     operator T() const noexcept;
-    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
-    T operator=(T) volatile noexcept;
+    T operator=(T) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     T operator=(T) noexcept;
 
-    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(T& expc, T desr,
-                               memory_order s, memory_order f) volatile noexcept;
+                               memory_order s, memory_order f) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                 memory_order s, memory_order f) volatile noexcept;
+                                 memory_order s, memory_order f) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(T& expc, T desr,
-                                 memory_order s, memory_order f) noexcept;
+                                 memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(T& expc, T desr,
-                               memory_order m = memory_order_seq_cst) volatile noexcept;
+                               memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(T& expc, T desr,
                                memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(T& expc, T desr,
-                                memory_order m = memory_order_seq_cst) volatile noexcept;
+                                memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(T& expc, T desr,
                                  memory_order m = memory_order_seq_cst) noexcept;
 
@@ -125,63 +134,85 @@ struct atomic<integral>
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
 
-    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral load(memory_order m = memory_order_seq_cst) const noexcept;
     operator integral() const volatile noexcept;
     operator integral() const noexcept;
-    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
-    integral operator=(integral desr) volatile noexcept;
+    integral operator=(integral desr) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator=(integral desr) noexcept;
 
     integral exchange(integral desr,
-                      memory_order m = memory_order_seq_cst) volatile noexcept;
+                      memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order s, memory_order f) volatile noexcept;
+                               memory_order s, memory_order f) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(integral& expc, integral desr,
                                memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                 memory_order s, memory_order f) volatile noexcept;
+                                 memory_order s, memory_order f) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(integral& expc, integral desr,
                                  memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(integral& expc, integral desr,
-                               memory_order m = memory_order_seq_cst) volatile noexcept;
+                               memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(integral& expc, integral desr,
                                memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(integral& expc, integral desr,
-                                memory_order m = memory_order_seq_cst) volatile noexcept;
+                                memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(integral& expc, integral desr,
                                  memory_order m = memory_order_seq_cst) noexcept;
 
-    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
-    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
-    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
-    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
-    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
 
-    integral operator++(int) volatile noexcept;
+    integral operator++(int) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator++(int) noexcept;
-    integral operator--(int) volatile noexcept;
+    integral operator--(int) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator--(int) noexcept;
-    integral operator++() volatile noexcept;
+    integral operator++() volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator++() noexcept;
-    integral operator--() volatile noexcept;
+    integral operator--() volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator--() noexcept;
-    integral operator+=(integral op) volatile noexcept;
+    integral operator+=(integral op) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator+=(integral op) noexcept;
-    integral operator-=(integral op) volatile noexcept;
+    integral operator-=(integral op) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator-=(integral op) noexcept;
-    integral operator&=(integral op) volatile noexcept;
+    integral operator&=(integral op) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator&=(integral op) noexcept;
-    integral operator|=(integral op) volatile noexcept;
+    integral operator|=(integral op) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator|=(integral op) noexcept;
-    integral operator^=(integral op) volatile noexcept;
+    integral operator^=(integral op) volatile noexcept
+      requires atomic<T>::is_always_lock_free;                                  // constraint since C++20
     integral operator^=(integral op) noexcept;
 
     void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept; // since C++20
@@ -209,49 +240,66 @@ struct atomic<T*>
     atomic& operator=(const atomic&) = delete;
     atomic& operator=(const atomic&) volatile = delete;
 
-    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
+    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* load(memory_order m = memory_order_seq_cst) const noexcept;
-    operator T*() const volatile noexcept;
+    operator T*() const volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     operator T*() const noexcept;
-    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
-    T* operator=(T*) volatile noexcept;
+    T* operator=(T*) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator=(T*) noexcept;
 
-    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order s, memory_order f) volatile noexcept;
+                               memory_order s, memory_order f) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(T*& expc, T* desr,
                                memory_order s, memory_order f) noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                 memory_order s, memory_order f) volatile noexcept;
+                                 memory_order s, memory_order f) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(T*& expc, T* desr,
                                  memory_order s, memory_order f) noexcept;
     bool compare_exchange_weak(T*& expc, T* desr,
-                               memory_order m = memory_order_seq_cst) volatile noexcept;
+                               memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_weak(T*& expc, T* desr,
                                memory_order m = memory_order_seq_cst) noexcept;
     bool compare_exchange_strong(T*& expc, T* desr,
-                                memory_order m = memory_order_seq_cst) volatile noexcept;
+                                memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     bool compare_exchange_strong(T*& expc, T* desr,
                                  memory_order m = memory_order_seq_cst) noexcept;
-    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
-    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
+    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
 
-    T* operator++(int) volatile noexcept;
+    T* operator++(int) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator++(int) noexcept;
-    T* operator--(int) volatile noexcept;
+    T* operator--(int) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator--(int) noexcept;
-    T* operator++() volatile noexcept;
+    T* operator++() volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator++() noexcept;
-    T* operator--() volatile noexcept;
+    T* operator--() volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator--() noexcept;
-    T* operator+=(ptrdiff_t op) volatile noexcept;
+    T* operator+=(ptrdiff_t op) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator+=(ptrdiff_t op) noexcept;
-    T* operator-=(ptrdiff_t op) volatile noexcept;
+    T* operator-=(ptrdiff_t op) volatile noexcept
+      requires atomic<T*>::is_always_lock_free;                                  // constraint since C++20
     T* operator-=(ptrdiff_t op) noexcept;
 
     void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept; // since C++20
@@ -277,48 +325,61 @@ struct atomic<floating-point-type> {  // since C++20
   atomic& operator=(const atomic&) = delete;
   atomic& operator=(const atomic&) volatile = delete;
 
-  void store(floating-point-type, memory_order = memory_order::seq_cst) volatile noexcept;
+  void store(floating-point-type, memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   void store(floating-point-type, memory_order = memory_order::seq_cst) noexcept;
-  floating-point-type operator=(floating-point-type) volatile noexcept;
+  floating-point-type operator=(floating-point-type) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type operator=(floating-point-type) noexcept;
-  floating-point-type load(memory_order = memory_order::seq_cst) volatile noexcept;
+  floating-point-type load(memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type load(memory_order = memory_order::seq_cst) noexcept;
-  operator floating-point-type() volatile noexcept;
+  operator floating-point-type() volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   operator floating-point-type() noexcept;
 
   floating-point-type exchange(floating-point-type,
-                               memory_order = memory_order::seq_cst) volatile noexcept;
+                               memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type exchange(floating-point-type,
                                memory_order = memory_order::seq_cst) noexcept;
   bool compare_exchange_weak(floating-point-type&, floating-point-type,
-                             memory_order, memory_order) volatile noexcept;
+                             memory_order, memory_order) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   bool compare_exchange_weak(floating-point-type&, floating-point-type,
                              memory_order, memory_order) noexcept;
   bool compare_exchange_strong(floating-point-type&, floating-point-type,
-                               memory_order, memory_order) volatile noexcept;
+                               memory_order, memory_order) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   bool compare_exchange_strong(floating-point-type&, floating-point-type,
                                memory_order, memory_order) noexcept;
   bool compare_exchange_weak(floating-point-type&, floating-point-type,
-                             memory_order = memory_order::seq_cst) volatile noexcept;
+                             memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   bool compare_exchange_weak(floating-point-type&, floating-point-type,
                              memory_order = memory_order::seq_cst) noexcept;
   bool compare_exchange_strong(floating-point-type&, floating-point-type,
-                               memory_order = memory_order::seq_cst) volatile noexcept;
+                               memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   bool compare_exchange_strong(floating-point-type&, floating-point-type,
                                memory_order = memory_order::seq_cst) noexcept;
 
   floating-point-type fetch_add(floating-point-type,
-                                memory_order = memory_order::seq_cst) volatile noexcept;
+                                memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type fetch_add(floating-point-type,
                                 memory_order = memory_order::seq_cst) noexcept;
   floating-point-type fetch_sub(floating-point-type,
-                                memory_order = memory_order::seq_cst) volatile noexcept;
+                                memory_order = memory_order::seq_cst) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type fetch_sub(floating-point-type,
                                 memory_order = memory_order::seq_cst) noexcept;
 
-  floating-point-type operator+=(floating-point-type) volatile noexcept;
+  floating-point-type operator+=(floating-point-type) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type operator+=(floating-point-type) noexcept;
-  floating-point-type operator-=(floating-point-type) volatile noexcept;
+  floating-point-type operator-=(floating-point-type) volatile noexcept
+      requires atomic<floating-point-type>::is_always_lock_free;                 // constraint since C++20
   floating-point-type operator-=(floating-point-type) noexcept;
 
   void wait(floating-point-type, memory_order = memory_order::seq_cst) const volatile noexcept; // since C++20
@@ -574,7 +635,8 @@ void atomic_signal_fence(memory_order m) noexcept;
 // deprecated
 
 template <class T>
-  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
+  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept
+    requires atomic<T>::is_always_lock_free;                                     // constraint since C++20
 
 template <class T>
   void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;
diff --git a/libcxx/include/tuple b/libcxx/include/tuple
index 081b90c7bbec5..46b7b70bf6b13 100644
--- a/libcxx/include/tuple
+++ b/libcxx/include/tuple
@@ -132,7 +132,7 @@ tuple(allocator_arg_t, Alloc, pair<T1, T2>) -> tuple<T1, T2>;       // since C++
 template <class Alloc, class ...T>
 tuple(allocator_arg_t, Alloc, tuple<T...>) -> tuple<T...>;          // since C++17
 
-struct ignore-type { // exposition only                             // Since C++26
+struct ignore-type { // exposition only                             // since C++26
   constexpr const ignore-type&
     operator=(const auto &) const noexcept
       { return *this; }
@@ -154,12 +154,21 @@ template <class T, class Tuple>
 template <class T> struct tuple_size; // undefined
 template <class... T> struct tuple_size<tuple<T...>>;
 template <class T>
- inline constexpr size_t tuple_size_v = tuple_size<T>::value; // C++17
+  inline constexpr size_t tuple_size_v = tuple_size<T>::value; // C++17
+
+template <class T> struct tuple_size<const T>;
+template <class T> struct tuple_size<volatile T>;       // deprecated in C++20
+template <class T> struct tuple_size<const volatile T>; // deprecated in C++20
+
 template <size_t I, class T> struct tuple_element; // undefined
 template <size_t I, class... T> struct tuple_element<I, tuple<T...>>;
 template <size_t I, class T>
   using tuple_element_t = typename tuple_element <I, T>::type; // C++14
 
+template <size_t I, class T> struct tuple_element<I, const T>;
+template <size_t I, class T> struct tuple_element<I, volatile T>;       // deprecated in C++20
+template <size_t I, class T> struct tuple_element<I, const volatile T>; // deprecated in C++20
+
 // 20.4.1.5, element access:
 template <size_t I, class... T>
     typename tuple_element<I, tuple<T...>>::type&
diff --git a/libcxx/include/variant b/libcxx/include/variant
index 5f2d03b7227b8..cbcd7d9ebbc6d 100644
--- a/libcxx/include/variant
+++ b/libcxx/include/variant
@@ -74,9 +74,9 @@ namespace std {
 
     // [variant.visit], visitation
     template<class Self, class Visitor>
-      constexpr decltype(auto) visit(this Self&&, Visitor&&); // Since C++26
+      constexpr decltype(auto) visit(this Self&&, Visitor&&); // since C++26
     template<class R, class Self, class Visitor>
-      constexpr R visit(this Self&&, Visitor&&);              // Since C++26
+      constexpr R visit(this Self&&, Visitor&&);              // since C++26
   };
 
   // 20.7.3, variant helper classes
@@ -86,8 +86,8 @@ namespace std {
   inline constexpr size_t variant_size_v = variant_size<T>::value;
 
   template <class T> struct variant_size<const T>;
-  template <class T> struct variant_size<volatile T>;
-  template <class T> struct variant_size<const volatile T>;
+  template <class T> struct variant_size<volatile T>;       // deprecated in C++20
+  template <class T> struct variant_size<const volatile T>; // deprecated in C++20
 
   template <class... Types>
   struct variant_size<variant<Types...>>;
@@ -98,8 +98,8 @@ namespace std {
   using variant_alternative_t = typename variant_alternative<I, T>::type;
 
   template <size_t I, class T> struct variant_alternative<I, const T>;
-  template <size_t I, class T> struct variant_alternative<I, volatile T>;
-  template <size_t I, class T> struct variant_alternative<I, const volatile T>;
+  template <size_t I, class T> struct variant_alternative<I, volatile T>;       // deprecated in C++20
+  template <size_t I, class T> struct variant_alternative<I, const volatile T>; // deprecated in C++20
 
   template <size_t I, class... Types>
   struct variant_alternative<I, variant<Types...>>;
@@ -318,10 +318,14 @@ template <class _Tp>
 struct _LIBCPP_TEMPLATE_VIS variant_size<const _Tp> : variant_size<_Tp> {};
 
 template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_size<volatile _Tp> : variant_size<_Tp> {};
+struct _LIBCPP_TEMPLATE_VIS variant_size<volatile _Tp> : variant_size<_Tp> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 template <class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_size<const volatile _Tp> : variant_size<_Tp> {};
+struct _LIBCPP_TEMPLATE_VIS variant_size<const volatile _Tp> : variant_size<_Tp> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 template <class... _Types>
 struct _LIBCPP_TEMPLATE_VIS variant_size<variant<_Types...>> : integral_constant<size_t, sizeof...(_Types)> {};
@@ -336,10 +340,14 @@ template <size_t _Ip, class _Tp>
 struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, const _Tp> : add_const<variant_alternative_t<_Ip, _Tp>> {};
 
 template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, volatile _Tp> : add_volatile<variant_alternative_t<_Ip, _Tp>> {};
+struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, volatile _Tp> : add_volatile<variant_alternative_t<_Ip, _Tp>> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 template <size_t _Ip, class _Tp>
-struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, const volatile _Tp> : add_cv<variant_alternative_t<_Ip, _Tp>> {};
+struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, const volatile _Tp> : add_cv<variant_alternative_t<_Ip, _Tp>> {
+  _LIBCPP_VOLATILE_DEPRECATED_WARNING;
+};
 
 template <size_t _Ip, class... _Types>
 struct _LIBCPP_TEMPLATE_VIS variant_alternative<_Ip, variant<_Types...>> {
diff --git a/libcxx/test/libcxx/atomics/atomics.types.operations/atomic_volatile_require_lock_free_in_cxx20.verify.cpp b/libcxx/test/libcxx/atomics/atomics.types.operations/atomic_volatile_require_lock_free_in_cxx20.verify.cpp
new file mode 100644
index 0000000000000..d835422f2c05b
--- /dev/null
+++ b/libcxx/test/libcxx/atomics/atomics.types.operations/atomic_volatile_require_lock_free_in_cxx20.verify.cpp
@@ -0,0 +1,87 @@
+//===----------------------------------------------------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+// <atomic>
+
+// UNSUPPORTED: c++03, c++11, c++17
+
+#include <atomic>
+
+struct arr {
+  int x[32];
+};
+
+struct arr2 {
+  int x[32];
+};
+
+void f() {
+  std::memory_order ord = std::memory_order_relaxed;
+
+  int expected = 0, desired = 0;
+  std::atomic<int> i{};
+  i.operator=(0);
+  i.store(0, ord);
+  i.load(ord);
+  i.operator int();
+  i.exchange(0, ord);
+  i.compare_exchange_weak(expected, desired, ord);
+  i.compare_exchange_weak(expected, desired, ord, ord);
+  i.compare_exchange_strong(expected, desired, ord);
+  i.compare_exchange_strong(expected, desired, ord, ord);
+
+  volatile std::atomic<int> vi{};
+  vi.operator=(0);
+  vi.store(0, ord);
+  vi.load(ord);
+  vi.operator int();
+  vi.exchange(0, ord);
+  vi.compare_exchange_weak(expected, desired, ord);
+  vi.compare_exchange_weak(expected, desired, ord, ord);
+  vi.compare_exchange_strong(expected, desired, ord);
+  vi.compare_exchange_strong(expected, desired, ord, ord);
+
+  arr test_value;
+
+  volatile std::atomic<arr> va{};
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.operator=(test_value);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.store(test_value, ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.load(ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.operator arr();
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.exchange(test_value, ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.compare_exchange_weak(test_value, test_value, ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.compare_exchange_weak(test_value, test_value, ord, ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.compare_exchange_strong(test_value, test_value, ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  va.compare_exchange_strong(test_value, test_value, ord, ord);
+
+  const volatile std::atomic<arr2> cva{};
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr2, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  cva.load(ord);
+
+  // expected-warning@*:* {{'__deprecated_if_not_always_lock_free<arr2, false>' is deprecated: volatile atomic operations are deprecated when std::atomic<T>::is_always_lock_free is false}}
+  cva.operator arr2();
+}
diff --git a/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_element_deprecated_in_cxx20.verify.cpp b/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_element_deprecated_in_cxx20.verify.cpp
new file mode 100644
index 0000000000000..2d4ab39424744
--- /dev/null
+++ b/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_element_deprecated_in_cxx20.verify.cpp
@@ -0,0 +1,21 @@
+//===----------------------------------------------------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+// <tuple>
+
+// UNSUPPORTED: c++03, c++11, c++17
+
+#include <tuple>
+
+[[maybe_unused]] std::tuple_element<0, std::tuple<void, void>> test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<volatile std::tuple<void, void>>' is deprecated}}
+[[maybe_unused]] std::tuple_element<0, volatile std::tuple<void, void>> vol_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<const volatile std::tuple<void, void>>' is deprecated}}
+[[maybe_unused]] std::tuple_element<0, const volatile std::tuple<void, void>> const_vol_test;
diff --git a/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_size_deprecated_in_cxx20.verify.cpp b/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_size_deprecated_in_cxx20.verify.cpp
new file mode 100644
index 0000000000000..4150ab94f83a7
--- /dev/null
+++ b/libcxx/test/libcxx/utilities/tuple/tuple.tuple/tuple.helper/volatile_tuple_size_deprecated_in_cxx20.verify.cpp
@@ -0,0 +1,21 @@
+//===----------------------------------------------------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+// <tuple>
+
+// UNSUPPORTED: c++03, c++11, c++17
+
+#include <tuple>
+
+[[maybe_unused]] std::tuple_size<std::tuple<void, void>> test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<volatile std::tuple<void, void>>' is deprecated}}
+[[maybe_unused]] std::tuple_size<volatile std::tuple<void, void>> vol_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<const volatile std::tuple<void, void>>' is deprecated}}
+[[maybe_unused]] std::tuple_size<const volatile std::tuple<void, void>> const_vol_test;
diff --git a/libcxx/test/libcxx/utilities/variant/variant.variant/variant.helper/volatile_deprecated.verify.cpp b/libcxx/test/libcxx/utilities/variant/variant.variant/variant.helper/volatile_deprecated.verify.cpp
new file mode 100644
index 0000000000000..e0071440314bc
--- /dev/null
+++ b/libcxx/test/libcxx/utilities/variant/variant.variant/variant.helper/volatile_deprecated.verify.cpp
@@ -0,0 +1,31 @@
+//===----------------------------------------------------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+// <variant>
+
+// UNSUPPORTED: c++03, c++11, c++17
+
+#include <variant>
+
+typedef std::variant<void, int> vars;
+
+[[maybe_unused]] std::variant_alternative<0, vars> alt_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<volatile std::variant<void, int>>' is deprecated}}
+[[maybe_unused]] std::variant_alternative<0, volatile vars> vol_alt_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<const volatile std::variant<void, int>>' is deprecated}}
+[[maybe_unused]] std::variant_alternative<0, const volatile vars> const_vol_alt_test;
+
+[[maybe_unused]] std::variant_size<vars> size_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<volatile std::variant<void, int>>' is deprecated}}
+[[maybe_unused]] std::variant_size<volatile vars> vol_size_test;
+
+// expected-warning@*:* {{'__volatile_deprecated_since_cxx20_warning<const volatile std::variant<void, int>>' is deprecated}}
+[[maybe_unused]] std::variant_size<const volatile vars> const_vol_size_test;



More information about the libcxx-commits mailing list