[clang] [libcxx] [clang] [libc++] P3309 constexpr atomic and atomic ref [WIP] (PR #98738)
Hana Dusíková via cfe-commits
cfe-commits at lists.llvm.org
Sat Jul 13 05:28:44 PDT 2024
https://github.com/hanickadot updated https://github.com/llvm/llvm-project/pull/98738
>From c691efa7649c990814bb363511106457e306aefa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Hana=20Dusi=CC=81kova=CC=81?= <hanicka at hanicka.net>
Date: Sat, 13 Jul 2024 14:28:07 +0200
Subject: [PATCH] [clang] [libc++] atomic constexpr support
---
clang/include/clang/Basic/Builtins.td | 84 ++--
clang/lib/AST/ExprConstant.cpp | 449 +++++++++++++++++++++-
libcxx/include/__atomic/atomic.h | 89 +++--
libcxx/include/__atomic/atomic_base.h | 76 ++--
libcxx/include/__atomic/atomic_flag.h | 56 ++-
libcxx/include/__atomic/atomic_ref.h | 314 ++++++++++-----
libcxx/include/__atomic/cxx_atomic_impl.h | 77 ++--
7 files changed, 871 insertions(+), 274 deletions(-)
diff --git a/clang/include/clang/Basic/Builtins.td b/clang/include/clang/Basic/Builtins.td
index f5b15cf90d1f8..0716cf02f5110 100644
--- a/clang/include/clang/Basic/Builtins.td
+++ b/clang/include/clang/Basic/Builtins.td
@@ -1682,97 +1682,97 @@ def SyncSwapN : Builtin, SyncBuiltinsTemplate {
// C11 _Atomic operations for <stdatomic.h>.
def C11AtomicInit : AtomicBuiltin {
let Spellings = ["__c11_atomic_init"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicLoad : AtomicBuiltin {
let Spellings = ["__c11_atomic_load"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicStore : AtomicBuiltin {
let Spellings = ["__c11_atomic_store"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicExchange : AtomicBuiltin {
let Spellings = ["__c11_atomic_exchange"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicCompareExchangeStrong : AtomicBuiltin {
let Spellings = ["__c11_atomic_compare_exchange_strong"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicCompareExchangeWeak : AtomicBuiltin {
let Spellings = ["__c11_atomic_compare_exchange_weak"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchAdd : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_add"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchSub : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_sub"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchAnd : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_and"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchOr : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_or"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchXor : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_xor"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchNand : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_nand"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchMax : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_max"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicFetchMin : AtomicBuiltin {
let Spellings = ["__c11_atomic_fetch_min"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def C11AtomicThreadFence : Builtin {
let Spellings = ["__c11_atomic_thread_fence"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "void(int)";
}
def C11AtomicSignalFence : Builtin {
let Spellings = ["__c11_atomic_signal_fence"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "void(int)";
}
@@ -1785,157 +1785,157 @@ def C11AtomicIsLockFree : Builtin {
// GNU atomic builtins.
def AtomicLoad : AtomicBuiltin {
let Spellings = ["__atomic_load"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicLoadN : AtomicBuiltin {
let Spellings = ["__atomic_load_n"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicStore : AtomicBuiltin {
let Spellings = ["__atomic_store"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicStoreN : AtomicBuiltin {
let Spellings = ["__atomic_store_n"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicExchange : AtomicBuiltin {
let Spellings = ["__atomic_exchange"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicExchangeN : AtomicBuiltin {
let Spellings = ["__atomic_exchange_n"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicCompareExchange : AtomicBuiltin {
let Spellings = ["__atomic_compare_exchange"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicCompareExchangeN : AtomicBuiltin {
let Spellings = ["__atomic_compare_exchange_n"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchAdd : AtomicBuiltin {
let Spellings = ["__atomic_fetch_add"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchSub : AtomicBuiltin {
let Spellings = ["__atomic_fetch_sub"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchAnd : AtomicBuiltin {
let Spellings = ["__atomic_fetch_and"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchOr : AtomicBuiltin {
let Spellings = ["__atomic_fetch_or"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchXor : AtomicBuiltin {
let Spellings = ["__atomic_fetch_xor"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicFetchNand : AtomicBuiltin {
let Spellings = ["__atomic_fetch_nand"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicAddFetch : AtomicBuiltin {
let Spellings = ["__atomic_add_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicSubFetch : AtomicBuiltin {
let Spellings = ["__atomic_sub_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicAndFetch : AtomicBuiltin {
let Spellings = ["__atomic_and_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicOrFetch : AtomicBuiltin {
let Spellings = ["__atomic_or_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicXorFetch : AtomicBuiltin {
let Spellings = ["__atomic_xor_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicMaxFetch : AtomicBuiltin {
let Spellings = ["__atomic_max_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicMinFetch : AtomicBuiltin {
let Spellings = ["__atomic_min_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicNandFetch : AtomicBuiltin {
let Spellings = ["__atomic_nand_fetch"];
- let Attributes = [CustomTypeChecking];
+ let Attributes = [CustomTypeChecking, Constexpr];
let Prototype = "void(...)";
}
def AtomicTestAndSet : Builtin {
let Spellings = ["__atomic_test_and_set"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "bool(void volatile*, int)";
}
def AtomicClear : Builtin {
let Spellings = ["__atomic_clear"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "void(void volatile*, int)";
}
def AtomicThreadFence : Builtin {
let Spellings = ["__atomic_thread_fence"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "void(int)";
}
def AtomicSignalFence : Builtin {
let Spellings = ["__atomic_signal_fence"];
- let Attributes = [NoThrow];
+ let Attributes = [NoThrow, Constexpr];
let Prototype = "void(int)";
}
diff --git a/clang/lib/AST/ExprConstant.cpp b/clang/lib/AST/ExprConstant.cpp
index 0aeac9d03eed3..32f04ccaa1205 100644
--- a/clang/lib/AST/ExprConstant.cpp
+++ b/clang/lib/AST/ExprConstant.cpp
@@ -1900,6 +1900,17 @@ static bool EvaluateFixedPoint(const Expr *E, APFixedPoint &Result,
// Misc utilities
//===----------------------------------------------------------------------===//
+static bool isOnePastTheEndOfCompleteObject(const ASTContext &Ctx,
+ const LValue &LV);
+
+enum class SizeOfType {
+ SizeOf,
+ DataSizeOf,
+};
+
+static bool HandleSizeof(EvalInfo &Info, SourceLocation Loc, QualType Type,
+ CharUnits &Size, SizeOfType SOT = SizeOfType::SizeOf);
+
/// Negate an APSInt in place, converting it to a signed form if necessary, and
/// preserving its value (by extending by up to one bit as needed).
static void negateAsSigned(APSInt &Int) {
@@ -3222,14 +3233,9 @@ static bool HandleLValueIndirectMember(EvalInfo &Info, const Expr *E,
return true;
}
-enum class SizeOfType {
- SizeOf,
- DataSizeOf,
-};
-
/// Get the size of the given type in char units.
static bool HandleSizeof(EvalInfo &Info, SourceLocation Loc, QualType Type,
- CharUnits &Size, SizeOfType SOT = SizeOfType::SizeOf) {
+ CharUnits &Size, SizeOfType SOT) {
// sizeof(void), __alignof__(void), sizeof(function) = 1 as a gcc
// extension.
if (Type->isVoidType() || Type->isFunctionType()) {
@@ -7884,6 +7890,426 @@ class ExprEvaluatorBase
return StmtVisitorTy::Visit(Source);
}
+ static bool EvaluateOrder(const Expr *E, EvalInfo &Info) {
+ // Memory order has no effect during constant evaluation; just validate it.
+ [[maybe_unused]] APSInt Order;
+ if (!EvaluateInteger(E, Order, Info)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ static bool ReadAtomicPtr(const AtomicExpr *E, APValue &Result,
+ EvalInfo &Info) {
+ LValue AtomicLV;
+ if (!EvaluatePointer(E->getPtr(), AtomicLV, Info)) {
+ return false;
+ }
+
+ if (!handleLValueToRValueConversion(Info, E->getPtr(), E->getType(),
+ AtomicLV, Result)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ static bool LoadAtomicValue(const AtomicExpr *E, APValue &Result,
+ EvalInfo &Info) {
+ if (!ReadAtomicPtr(E, Result, Info)) {
+ return false;
+ }
+
+ // The memory-order argument is evaluated for validity but otherwise ignored.
+ if (!EvaluateOrder(E->getOrder(), Info)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ static bool FetchAtomicOp(const AtomicExpr *E, APValue &Result,
+ EvalInfo &Info, bool StoreToResultAfter) {
+ LValue AtomicLV;
+ QualType AtomicTy =
+ E->getPtr()->getType()->getPointeeType().getAtomicUnqualifiedType();
+ if (!EvaluatePointer(E->getPtr(), AtomicLV, Info)) {
+ return false;
+ }
+
+ APValue AtomicVal;
+ if (!handleLValueToRValueConversion(Info, E->getPtr(), E->getType(),
+ AtomicLV, AtomicVal)) {
+ return false;
+ }
+
+ if (!StoreToResultAfter) {
+ Result = AtomicVal;
+ }
+
+ const auto ResultType = E->getType();
+
+ APValue ArgumentVal;
+ if (!Evaluate(ArgumentVal, Info, E->getVal1())) {
+ return false;
+ }
+
+ APValue Replacement;
+ if (ResultType->isIntegralOrEnumerationType()) {
+ const APSInt AtomicInt = AtomicVal.getInt();
+ const APSInt ArgumentInt = ArgumentVal.getInt();
+
+ switch (E->getOp()) {
+ case AtomicExpr::AO__c11_atomic_fetch_add:
+ case AtomicExpr::AO__atomic_fetch_add:
+ case AtomicExpr::AO__atomic_add_fetch:
+ Replacement = APValue(AtomicInt + ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_sub_fetch:
+ Replacement = APValue(AtomicInt - ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_and:
+ case AtomicExpr::AO__atomic_fetch_and:
+ case AtomicExpr::AO__atomic_and_fetch:
+ Replacement = APValue(AtomicInt & ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_or:
+ case AtomicExpr::AO__atomic_fetch_or:
+ case AtomicExpr::AO__atomic_or_fetch:
+ Replacement = APValue(AtomicInt | ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_xor:
+ case AtomicExpr::AO__atomic_fetch_xor:
+ case AtomicExpr::AO__atomic_xor_fetch:
+ Replacement = APValue(AtomicInt ^ ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_nand:
+ case AtomicExpr::AO__atomic_fetch_nand:
+ case AtomicExpr::AO__atomic_nand_fetch:
+ Replacement = APValue(~(AtomicInt & ArgumentInt));
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_max:
+ case AtomicExpr::AO__atomic_fetch_max:
+ case AtomicExpr::AO__atomic_max_fetch:
+ Replacement =
+ APValue((AtomicInt > ArgumentInt) ? AtomicInt : ArgumentInt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_min:
+ case AtomicExpr::AO__atomic_fetch_min:
+ case AtomicExpr::AO__atomic_min_fetch:
+ Replacement =
+ APValue((AtomicInt < ArgumentInt) ? AtomicInt : ArgumentInt);
+ break;
+ default:
+ return false;
+ }
+ } else if (ResultType->isRealFloatingType()) {
+ const llvm::RoundingMode RM = getActiveRoundingMode(Info, E);
+ APFloat AtomicFlt = AtomicVal.getFloat();
+ const APFloat ArgumentFlt = ArgumentVal.getFloat();
+ APFloat::opStatus St;
+
+ switch (E->getOp()) {
+ case AtomicExpr::AO__c11_atomic_fetch_add: // GCC __atomic builtins don't
+ // support floating point
+ St = AtomicFlt.add(ArgumentFlt, RM);
+ Replacement = APValue(AtomicFlt);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_sub:
+ St = AtomicFlt.subtract(ArgumentFlt, RM);
+ Replacement = APValue(AtomicFlt);
+ break;
+ default:
+ return false;
+ }
+
+ if (!checkFloatingPointResult(Info, E, St)) {
+ return false;
+ }
+ } else if (ResultType->isPointerType()) {
+ LValue AtomicPtr;
+ AtomicPtr.setFrom(Info.Ctx, AtomicVal);
+
+ APSInt ArgumentInt = ArgumentVal.getInt();
+
+ CharUnits SizeOfPointee;
+ if (!HandleSizeof(Info, E->getExprLoc(), AtomicTy->getPointeeType(),
+ SizeOfPointee))
+ return false;
+
+ // GCC's __atomic fetch add/sub builtins advance the pointer by raw bytes,
+ // not by multiples of sizeof(T), so convert bytes to an element count first.
+ switch (E->getOp()) {
+ case AtomicExpr::AO__atomic_fetch_add:
+ case AtomicExpr::AO__atomic_add_fetch:
+ case AtomicExpr::AO__atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_sub_fetch: {
+ const auto sizeOfOneItem =
+ APSInt(APInt(ArgumentInt.getBitWidth(), SizeOfPointee.getQuantity(),
+ false),
+ false);
+ if ((ArgumentInt % sizeOfOneItem) != 0) {
+ // Advancing the pointer by a byte count that is not divisible by the
+ // pointee size is UB and therefore rejected in constant evaluation.
+ return false;
+ }
+ ArgumentInt /= sizeOfOneItem;
+ } break;
+ default:
+ break;
+ }
+
+ switch (E->getOp()) {
+ case AtomicExpr::AO__c11_atomic_fetch_add:
+ case AtomicExpr::AO__atomic_fetch_add:
+ case AtomicExpr::AO__atomic_add_fetch:
+ AtomicPtr.adjustOffsetAndIndex(Info, E, ArgumentInt, SizeOfPointee);
+ break;
+ case AtomicExpr::AO__c11_atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_sub_fetch:
+ ArgumentInt.negate();
+ AtomicPtr.adjustOffsetAndIndex(Info, E, ArgumentInt, SizeOfPointee);
+ break;
+ default:
+ return false;
+ }
+
+ AtomicPtr.moveInto(Replacement);
+ } else {
+ // Unsupported operand type: not integer, floating point, or pointer.
+ return false;
+ }
+
+ if (StoreToResultAfter) {
+ Result = Replacement;
+ }
+
+ if (!handleAssignment(Info, E, AtomicLV, AtomicTy, Replacement)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ static bool StoreAtomicValue(const AtomicExpr *E, EvalInfo &Info) {
+ LValue LV;
+ if (!EvaluatePointer(E->getPtr(), LV, Info)) {
+ return false;
+ }
+
+ APValue NewVal;
+ if (!Evaluate(NewVal, Info, E->getVal1())) {
+ return false;
+ }
+
+ if (!handleAssignment(Info, E, LV, E->getVal1()->getType(), NewVal)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ static bool CompareExchangeAtomicValue(const AtomicExpr *E, APValue &Result,
+ EvalInfo &Info) {
+ // dereference _Atomic * (atomic value)
+ LValue AtomicLV;
+ QualType AtomicTy =
+ E->getPtr()->getType()->getPointeeType().getAtomicUnqualifiedType();
+ if (!EvaluatePointer(E->getPtr(), AtomicLV, Info)) {
+ return false;
+ }
+
+ // dereference T * (expected value)
+ LValue ExpectedLV;
+ QualType ExpectedTy = E->getVal1()->getType()->getPointeeType();
+ if (!EvaluatePointer(E->getVal1(), ExpectedLV, Info)) {
+ return false;
+ }
+
+ // get values for atomic and expected
+ APValue AtomicVal;
+ APValue ExpectedVal;
+
+ // convert pointer to value
+ if (!handleLValueToRValueConversion(Info, E->getPtr(), AtomicTy, AtomicLV,
+ AtomicVal)) {
+ return false;
+ }
+
+ if (!handleLValueToRValueConversion(Info, E->getVal1(), ExpectedTy,
+ ExpectedLV, ExpectedVal)) {
+ return false;
+ }
+
+ bool DoExchange = false;
+
+ // compare atomic<int> and friends
+ if (AtomicTy->isIntegralOrEnumerationType() &&
+ ExpectedTy->isIntegralOrEnumerationType()) {
+ const APSInt AtomicInt = AtomicVal.getInt();
+ const APSInt ExpectedInt = ExpectedVal.getInt();
+ if (AtomicInt == ExpectedInt) {
+ DoExchange = true;
+ }
+ } else if (AtomicTy->isRealFloatingType() &&
+ ExpectedTy->isRealFloatingType()) {
+ const APFloat AtomicFlt = AtomicVal.getFloat();
+ const APFloat ExpectedFlt = ExpectedVal.getFloat();
+ if (AtomicFlt == ExpectedFlt) {
+ DoExchange = true;
+ }
+ } else if (AtomicTy->isPointerType() && ExpectedTy->isPointerType()) {
+ // get LValue of objects pointed to
+ LValue LHS;
+ LHS.setFrom(Info.Ctx, AtomicVal);
+
+ LValue RHS;
+ RHS.setFrom(Info.Ctx, ExpectedVal);
+
+ if (HasSameBase(LHS, RHS)) {
+ const CharUnits &LHSOffset = LHS.getLValueOffset();
+ const CharUnits &RHSOffset = RHS.getLValueOffset();
+
+ const unsigned PtrSize = Info.Ctx.getTypeSize(AtomicTy);
+ assert(PtrSize <= 64 && "Pointer width is larger than expected");
+ const uint64_t Mask = ~0ULL >> (64 - PtrSize);
+
+ const uint64_t CompareLHS = LHSOffset.getQuantity() & Mask;
+ const uint64_t CompareRHS = RHSOffset.getQuantity() & Mask;
+
+ if (CompareLHS == CompareRHS) {
+ DoExchange = true;
+ }
+ } else {
+
+ // Comparing pointers into distinct string literals is unspecified,
+ // so it cannot be performed during constant evaluation.
+ if ((IsLiteralLValue(LHS) || IsLiteralLValue(RHS)) && LHS.Base &&
+ RHS.Base) {
+ return false;
+ }
+
+ if (IsWeakLValue(LHS) || IsWeakLValue(RHS)) {
+ return false;
+ }
+
+ if ((!LHS.Base && !LHS.Offset.isZero()) ||
+ (!RHS.Base && !RHS.Offset.isZero())) {
+ return false;
+ }
+
+ if (LHS.Base && LHS.Offset.isZero() &&
+ isOnePastTheEndOfCompleteObject(Info.Ctx, RHS)) {
+ return false;
+ }
+
+ if (RHS.Base && RHS.Offset.isZero() &&
+ isOnePastTheEndOfCompleteObject(Info.Ctx, LHS)) {
+ return false;
+ }
+
+ if ((RHS.Base && isZeroSized(RHS)) || (LHS.Base && isZeroSized(LHS))) {
+ return false;
+ }
+
+ // after all it's a different object
+ DoExchange = false;
+ }
+
+ } else {
+ return false;
+ }
+
+ if (DoExchange) {
+ // The comparison succeeded, so the replacement value is stored;
+ // evaluate the replacement expression first.
+ APValue Replacement;
+ if (!Evaluate(Replacement, Info, E->getVal2())) {
+ return false;
+ }
+
+ // and assign it to atomic
+ if (!handleAssignment(Info, E, AtomicLV, AtomicTy, Replacement)) {
+ return false;
+ }
+ }
+
+ // Write the previously-read atomic value back through the expected pointer.
+ if (!handleAssignment(Info, E, ExpectedLV, ExpectedTy, AtomicVal)) {
+ return false;
+ }
+
+ // Return whether the exchange was performed.
+ Result = APValue(Info.Ctx.MakeIntValue(DoExchange, E->getType()));
+ return true;
+ }
+
+ bool VisitAtomicExpr(const AtomicExpr *E) {
+ APValue LocalResult;
+ switch (E->getOp()) {
+ default:
+ return Error(E);
+ case AtomicExpr::AO__c11_atomic_load:
+ case AtomicExpr::AO__atomic_load_n:
+ if (!LoadAtomicValue(E, LocalResult, Info)) {
+ return Error(E);
+ }
+ return DerivedSuccess(LocalResult, E);
+ case AtomicExpr::AO__c11_atomic_compare_exchange_strong:
+ case AtomicExpr::AO__c11_atomic_compare_exchange_weak:
+ case AtomicExpr::AO__atomic_compare_exchange_n:
+ if (!CompareExchangeAtomicValue(E, LocalResult, Info)) {
+ return Error(E);
+ }
+ return DerivedSuccess(LocalResult, E);
+ case AtomicExpr::AO__c11_atomic_exchange:
+ case AtomicExpr::AO__atomic_exchange_n:
+ if (!LoadAtomicValue(E, LocalResult, Info)) {
+ return Error(E);
+ }
+ if (!StoreAtomicValue(E, Info)) {
+ return Error(E);
+ }
+ return DerivedSuccess(LocalResult, E);
+ case AtomicExpr::AO__c11_atomic_fetch_add:
+ case AtomicExpr::AO__c11_atomic_fetch_sub:
+ case AtomicExpr::AO__c11_atomic_fetch_and:
+ case AtomicExpr::AO__c11_atomic_fetch_or:
+ case AtomicExpr::AO__c11_atomic_fetch_xor:
+ case AtomicExpr::AO__c11_atomic_fetch_nand:
+ case AtomicExpr::AO__c11_atomic_fetch_max:
+ case AtomicExpr::AO__c11_atomic_fetch_min:
+ case AtomicExpr::AO__atomic_fetch_add:
+ case AtomicExpr::AO__atomic_fetch_sub:
+ case AtomicExpr::AO__atomic_fetch_and:
+ case AtomicExpr::AO__atomic_fetch_xor:
+ case AtomicExpr::AO__atomic_fetch_or:
+ case AtomicExpr::AO__atomic_fetch_nand:
+ case AtomicExpr::AO__atomic_fetch_max:
+ case AtomicExpr::AO__atomic_fetch_min:
+ if (!FetchAtomicOp(E, LocalResult, Info, false)) {
+ return Error(E);
+ }
+ return DerivedSuccess(LocalResult, E);
+ case AtomicExpr::AO__atomic_add_fetch:
+ case AtomicExpr::AO__atomic_sub_fetch:
+ case AtomicExpr::AO__atomic_and_fetch:
+ case AtomicExpr::AO__atomic_xor_fetch:
+ case AtomicExpr::AO__atomic_or_fetch:
+ case AtomicExpr::AO__atomic_nand_fetch:
+ case AtomicExpr::AO__atomic_max_fetch:
+ case AtomicExpr::AO__atomic_min_fetch:
+ if (!FetchAtomicOp(E, LocalResult, Info, true)) {
+ return Error(E);
+ }
+ return DerivedSuccess(LocalResult, E);
+ }
+ }
+
bool VisitPseudoObjectExpr(const PseudoObjectExpr *E) {
for (const Expr *SemE : E->semantics()) {
if (auto *OVE = dyn_cast<OpaqueValueExpr>(SemE)) {
@@ -15595,6 +16021,17 @@ class VoidExprEvaluator
}
}
+ bool VisitAtomicExpr(const AtomicExpr *E) {
+ switch (E->getOp()) {
+ default:
+ return Error(E);
+ case AtomicExpr::AO__c11_atomic_init:
+ case AtomicExpr::AO__c11_atomic_store:
+ case AtomicExpr::AO__atomic_store_n:
+ return StoreAtomicValue(E, Info);
+ }
+ }
+
bool VisitCallExpr(const CallExpr *E) {
if (!IsConstantEvaluatedBuiltinCall(E))
return ExprEvaluatorBaseTy::VisitCallExpr(E);
diff --git a/libcxx/include/__atomic/atomic.h b/libcxx/include/__atomic/atomic.h
index bd3f659c22df0..fa53bdb7f8855 100644
--- a/libcxx/include/__atomic/atomic.h
+++ b/libcxx/include/__atomic/atomic.h
@@ -67,7 +67,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
using value_type = _Tp*;
using difference_type = ptrdiff_t;
- _LIBCPP_HIDE_FROM_ABI atomic() _NOEXCEPT = default;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic() _NOEXCEPT = default;
_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
@@ -75,7 +75,7 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
__base::store(__d);
return __d;
}
- _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator=(_Tp* __d) _NOEXCEPT {
__base::store(__d);
return __d;
}
@@ -86,7 +86,8 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
+ fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
// __atomic_fetch_add accepts function pointers, guard against them.
static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
@@ -98,24 +99,25 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
+ fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
// __atomic_fetch_add accepts function pointers, guard against them.
static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT { return fetch_add(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
_LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT { return fetch_sub(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
_LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT { return fetch_add(1) + 1; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
_LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT { return fetch_sub(1) - 1; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
_LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
_LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
atomic& operator=(const atomic&) = delete;
atomic& operator=(const atomic&) volatile = delete;
@@ -151,7 +153,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
}
template <class _This, class _Operation, class _BuiltinOp>
- _LIBCPP_HIDE_FROM_ABI static _Tp
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR static _Tp
__rmw_op(_This&& __self, _Tp __operand, memory_order __m, _Operation __operation, _BuiltinOp __builtin_op) {
if constexpr (__has_rmw_builtin()) {
return __builtin_op(std::addressof(std::forward<_This>(__self).__a_), __operand, __m);
@@ -174,7 +176,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
}
template <class _This>
- _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_add(_This&& __self, _Tp __operand, memory_order __m) {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR static _Tp __fetch_add(_This&& __self, _Tp __operand, memory_order __m) {
auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
return std::__cxx_atomic_fetch_add(__a, __builtin_operand, __order);
};
@@ -182,7 +184,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
}
template <class _This>
- _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_sub(_This&& __self, _Tp __operand, memory_order __m) {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR static _Tp __fetch_sub(_This&& __self, _Tp __operand, memory_order __m) {
auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
return std::__cxx_atomic_fetch_sub(__a, __builtin_operand, __order);
};
@@ -207,7 +209,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
__base::store(__d);
return __d;
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator=(_Tp __d) noexcept {
__base::store(__d);
return __d;
}
@@ -218,7 +220,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
return __fetch_add(*this, __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
return __fetch_add(*this, __op, __m);
}
@@ -228,7 +230,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
return __fetch_sub(*this, __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
return __fetch_sub(*this, __op, __m);
}
@@ -238,7 +240,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
return fetch_add(__op) + __op;
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) noexcept { return fetch_add(__op) + __op; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator+=(_Tp __op) noexcept { return fetch_add(__op) + __op; }
_LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile noexcept
requires __base::is_always_lock_free
@@ -246,7 +248,7 @@ struct atomic<_Tp> : __atomic_base<_Tp> {
return fetch_sub(__op) - __op;
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) noexcept { return fetch_sub(__op) - __op; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator-=(_Tp __op) noexcept { return fetch_sub(__op) - __op; }
};
#endif // _LIBCPP_STD_VER >= 20
@@ -259,7 +261,7 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const volatile atomic<_Tp>* __o)
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT {
return __o->is_lock_free();
}
@@ -272,7 +274,7 @@ atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NO
}
template <class _Tp>
-_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
+_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
}
@@ -285,7 +287,8 @@ _LIBCPP_HIDE_FROM_ABI void atomic_store(volatile atomic<_Tp>* __o, typename atom
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI void atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
__o->store(__d);
}
@@ -299,7 +302,7 @@ atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_typ
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI void
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
__o->store(__d, __m);
@@ -313,7 +316,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const atomic<_Tp>* __o) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp atomic_load(const atomic<_Tp>* __o) _NOEXCEPT {
return __o->load();
}
@@ -326,7 +329,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const volatile atomic<_Tp>* __o,
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
return __o->load(__m);
}
@@ -339,7 +342,8 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(volatile atomic<_Tp>* __o, typename at
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
return __o->exchange(__d);
}
@@ -352,7 +356,7 @@ atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT {
return __o->exchange(__d, __m);
}
@@ -366,7 +370,7 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_compare_exchange_weak(
atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
return __o->compare_exchange_weak(*__e, __d);
}
@@ -380,7 +384,7 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_compare_exchange_strong(
atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
return __o->compare_exchange_strong(*__e, __d);
}
@@ -398,7 +402,7 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_compare_exchange_weak_explicit(
atomic<_Tp>* __o,
typename atomic<_Tp>::value_type* __e,
typename atomic<_Tp>::value_type __d,
@@ -420,7 +424,7 @@ _LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_compare_exchange_strong_explicit(
atomic<_Tp>* __o,
typename atomic<_Tp>::value_type* __e,
typename atomic<_Tp>::value_type __d,
@@ -494,7 +498,8 @@ atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_typ
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
return __o->fetch_add(__op);
}
@@ -507,7 +512,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add_explicit(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
return __o->fetch_add(__op, __m);
}
@@ -521,7 +526,8 @@ atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_typ
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
return __o->fetch_sub(__op);
}
@@ -534,7 +540,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub_explicit(
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
return __o->fetch_sub(__op, __m);
}
@@ -547,7 +553,8 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and(volatile atomic<_Tp>* __o, typename a
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
return __o->fetch_and(__op);
}
@@ -560,7 +567,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and_explicit(
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
return __o->fetch_and(__op, __m);
}
@@ -573,7 +580,8 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(volatile atomic<_Tp>* __o, typename at
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
return __o->fetch_or(__op);
}
@@ -586,7 +594,7 @@ atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
return __o->fetch_or(__op, __m);
}
@@ -599,7 +607,8 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(volatile atomic<_Tp>* __o, typename a
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
return __o->fetch_xor(__op);
}
@@ -612,7 +621,7 @@ _LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor_explicit(
}
template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
return __o->fetch_xor(__op, __m);
}
diff --git a/libcxx/include/__atomic/atomic_base.h b/libcxx/include/__atomic/atomic_base.h
index 7e26434c9c3a0..fa59451953516 100644
--- a/libcxx/include/__atomic/atomic_base.h
+++ b/libcxx/include/__atomic/atomic_base.h
@@ -39,14 +39,14 @@ struct __atomic_base // false
_LIBCPP_HIDE_FROM_ABI bool is_lock_free() const volatile _NOEXCEPT {
return __cxx_atomic_is_lock_free(sizeof(__cxx_atomic_impl<_Tp>));
}
- _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const _NOEXCEPT {
- return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const _NOEXCEPT {
+ return __cxx_atomic_is_lock_free(sizeof(__cxx_atomic_impl<_Tp>));
}
_LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
}
- _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
}
@@ -54,16 +54,16 @@ struct __atomic_base // false
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
return std::__cxx_atomic_load(std::addressof(__a_), __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
return std::__cxx_atomic_load(std::addressof(__a_), __m);
}
_LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
- _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
_LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
}
_LIBCPP_HIDE_FROM_ABI bool
@@ -71,7 +71,8 @@ struct __atomic_base // false
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
}
- _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI bool
+ compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
}
@@ -80,7 +81,8 @@ struct __atomic_base // false
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
}
- _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI bool
+ compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
}
@@ -88,7 +90,7 @@ struct __atomic_base // false
compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
}
- _LIBCPP_HIDE_FROM_ABI bool
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI bool
compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
}
@@ -96,7 +98,7 @@ struct __atomic_base // false
compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
}
- _LIBCPP_HIDE_FROM_ABI bool
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI bool
compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
}
@@ -105,23 +107,37 @@ struct __atomic_base // false
volatile _NOEXCEPT {
std::__atomic_wait(*this, __v, __m);
}
- _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
+ _LIBCPP_AVAILABILITY_SYNC _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI void
wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
- std::__atomic_wait(*this, __v, __m);
+ if consteval {
+ if (this->load(__m) != __v) {
+ __builtin_trap();
+ }
+ } else {
+ std::__atomic_wait(*this, __v, __m);
+ }
}
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() volatile _NOEXCEPT {
std::__atomic_notify_one(*this);
}
- _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() _NOEXCEPT { std::__atomic_notify_one(*this); }
+ _LIBCPP_AVAILABILITY_SYNC _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI void notify_one() _NOEXCEPT {
+ if !consteval {
+ std::__atomic_notify_one(*this);
+ }
+ }
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() volatile _NOEXCEPT {
std::__atomic_notify_all(*this);
}
- _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() _NOEXCEPT { std::__atomic_notify_all(*this); }
+ _LIBCPP_AVAILABILITY_SYNC _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI void notify_all() _NOEXCEPT {
+ if !consteval {
+ std::__atomic_notify_all(*this);
+ }
+ }
#if _LIBCPP_STD_VER >= 20
_LIBCPP_HIDE_FROM_ABI constexpr __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
- _LIBCPP_HIDE_FROM_ABI __atomic_base() _NOEXCEPT = default;
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI __atomic_base() _NOEXCEPT = default;
#endif
_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
@@ -142,52 +158,52 @@ struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
_LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
}
_LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
_LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
_LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
_LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
_LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
_LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
_LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
_LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
_LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+ _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
};
// Here we need _IsIntegral because the default template argument is not enough
@@ -205,7 +221,7 @@ struct __atomic_waitable_traits<__atomic_base<_Tp, _IsIntegral> > {
return __this.load(__order);
}
- static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
+ static _LIBCPP_CONSTEXPR _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
__atomic_contention_address(const __atomic_base<_Tp, _IsIntegral>& __a) {
return std::addressof(__a.__a_);
}
diff --git a/libcxx/include/__atomic/atomic_flag.h b/libcxx/include/__atomic/atomic_flag.h
index 00b157cdff78b..7518e88e29b92 100644
--- a/libcxx/include/__atomic/atomic_flag.h
+++ b/libcxx/include/__atomic/atomic_flag.h
@@ -28,23 +28,24 @@ _LIBCPP_BEGIN_NAMESPACE_STD
struct atomic_flag {
__cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
- _LIBCPP_HIDE_FROM_ABI bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT {
return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);
}
- _LIBCPP_HIDE_FROM_ABI bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);
}
- _LIBCPP_HIDE_FROM_ABI bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
+ test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);
}
- _LIBCPP_HIDE_FROM_ABI bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT {
return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);
}
- _LIBCPP_HIDE_FROM_ABI void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);
}
- _LIBCPP_HIDE_FROM_ABI void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT {
__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);
}
@@ -84,7 +85,8 @@ struct atomic_flag {
template <>
struct __atomic_waitable_traits<atomic_flag> {
- static _LIBCPP_HIDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE __atomic_load(const atomic_flag& __a, memory_order __order) {
+ static _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _LIBCPP_ATOMIC_FLAG_TYPE
+ __atomic_load(const atomic_flag& __a, memory_order __order) {
return std::__cxx_atomic_load(&__a.__a_, __order);
}
@@ -93,54 +95,66 @@ struct __atomic_waitable_traits<atomic_flag> {
return std::__cxx_atomic_load(&__a.__a_, __order);
}
- static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
+ static _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR const __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
__atomic_contention_address(const atomic_flag& __a) {
return std::addressof(__a.__a_);
}
- static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
+ static _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR const volatile __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
__atomic_contention_address(const volatile atomic_flag& __a) {
return std::addressof(__a.__a_);
}
};
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT { return __o->test(); }
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT {
+ return __o->test();
+}
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test(const atomic_flag* __o) _NOEXCEPT { return __o->test(); }
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_flag_test(const atomic_flag* __o) _NOEXCEPT {
+ return __o->test();
+}
-inline _LIBCPP_HIDE_FROM_ABI bool
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT {
return __o->test(__m);
}
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT {
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
+atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT {
return __o->test(__m);
}
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT {
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT {
return __o->test_and_set();
}
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT { return __o->test_and_set(); }
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT {
+ return __o->test_and_set();
+}
-inline _LIBCPP_HIDE_FROM_ABI bool
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT {
return __o->test_and_set(__m);
}
-inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT {
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
+atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT {
return __o->test_and_set(__m);
}
-inline _LIBCPP_HIDE_FROM_ABI void atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT { __o->clear(); }
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT {
+ __o->clear();
+}
-inline _LIBCPP_HIDE_FROM_ABI void atomic_flag_clear(atomic_flag* __o) _NOEXCEPT { __o->clear(); }
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void atomic_flag_clear(atomic_flag* __o) _NOEXCEPT { __o->clear(); }
-inline _LIBCPP_HIDE_FROM_ABI void atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT {
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT {
__o->clear(__m);
}
-inline _LIBCPP_HIDE_FROM_ABI void atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT {
+inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT {
__o->clear(__m);
}
diff --git a/libcxx/include/__atomic/atomic_ref.h b/libcxx/include/__atomic/atomic_ref.h
index 156f1961151c1..1d9e09c60bf7c 100644
--- a/libcxx/include/__atomic/atomic_ref.h
+++ b/libcxx/include/__atomic/atomic_ref.h
@@ -47,10 +47,10 @@ struct __atomic_ref_base {
protected:
_Tp* __ptr_;
- _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}
private:
- _LIBCPP_HIDE_FROM_ABI static _Tp* __clear_padding(_Tp& __val) noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR static _Tp* __clear_padding(_Tp& __val) noexcept {
_Tp* __ptr = std::addressof(__val);
# if __has_builtin(__builtin_clear_padding)
__builtin_clear_padding(__ptr);
@@ -58,7 +58,7 @@ struct __atomic_ref_base {
return __ptr;
}
- _LIBCPP_HIDE_FROM_ABI static bool __compare_exchange(
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR static bool __compare_exchange(
_Tp* __ptr, _Tp* __expected, _Tp* __desired, bool __is_weak, int __success, int __failure) noexcept {
if constexpr (
# if __has_builtin(__builtin_clear_padding)
@@ -107,112 +107,193 @@ struct __atomic_ref_base {
static constexpr bool is_always_lock_free =
__atomic_always_lock_free(sizeof(_Tp), reinterpret_cast<void*>(-required_alignment));
- _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), __ptr_); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool is_lock_free() const noexcept {
+ return __atomic_is_lock_free(sizeof(_Tp), __ptr_);
+ }
- _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+ store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
__order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
"atomic_ref: memory order argument to atomic store operation is invalid");
- __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
+ if consteval {
+ *__ptr_ = __desired;
+ } else {
+ __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
+ }
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator=(_Tp __desired) const noexcept {
store(__desired);
return __desired;
}
- _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
__order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
__order == memory_order::seq_cst,
"atomic_ref: memory order argument to atomic load operation is invalid");
- alignas(_Tp) byte __mem[sizeof(_Tp)];
- auto* __ret = reinterpret_cast<_Tp*>(__mem);
- __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
- return *__ret;
+ if consteval {
+ return *__ptr_;
+ } else {
+ alignas(_Tp) byte __mem[sizeof(_Tp)];
+ auto* __ret = reinterpret_cast<_Tp*>(__mem);
+ __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
+ return *__ret;
+ }
}
- _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }
-
- _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
- alignas(_Tp) byte __mem[sizeof(_Tp)];
- auto* __ret = reinterpret_cast<_Tp*>(__mem);
- __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
- return *__ret;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR operator _Tp() const noexcept { return load(); }
+
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
+ if consteval {
+ _Tp __tmp = *__ptr_;
+ *__ptr_ = __desired;
+ return __tmp;
+ } else {
+ alignas(_Tp) byte __mem[sizeof(_Tp)];
+ auto* __ret = reinterpret_cast<_Tp*>(__mem);
+ __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
+ return *__ret;
+ }
}
- _LIBCPP_HIDE_FROM_ABI bool
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
__failure == memory_order::relaxed || __failure == memory_order::consume ||
__failure == memory_order::acquire || __failure == memory_order::seq_cst,
"atomic_ref: failure memory order argument to weak atomic compare-and-exchange operation is invalid");
- return __compare_exchange(
- __ptr_,
- std::addressof(__expected),
- std::addressof(__desired),
- true,
- std::__to_gcc_order(__success),
- std::__to_gcc_order(__failure));
- }
- _LIBCPP_HIDE_FROM_ABI bool
+ if consteval {
+ const _Tp __original = *__ptr_;
+ if (__original == __expected) {
+ *__ptr_ = __desired;
+ __expected = __original;
+ return true;
+ } else {
+ __expected = __original;
+ return false;
+ }
+ } else {
+ return __compare_exchange(
+ __ptr_,
+ std::addressof(__expected),
+ std::addressof(__desired),
+ true,
+ std::__to_gcc_order(__success),
+ std::__to_gcc_order(__failure));
+ }
+ }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
__failure == memory_order::relaxed || __failure == memory_order::consume ||
__failure == memory_order::acquire || __failure == memory_order::seq_cst,
"atomic_ref: failure memory order argument to strong atomic compare-and-exchange operation is invalid");
- return __compare_exchange(
- __ptr_,
- std::addressof(__expected),
- std::addressof(__desired),
- false,
- std::__to_gcc_order(__success),
- std::__to_gcc_order(__failure));
+ if consteval {
+ const _Tp __original = *__ptr_;
+ if (__original == __expected) {
+ *__ptr_ = __desired;
+ __expected = __original;
+ return true;
+ } else {
+ __expected = __original;
+ return false;
+ }
+ } else {
+ return __compare_exchange(
+ __ptr_,
+ std::addressof(__expected),
+ std::addressof(__desired),
+ false,
+ std::__to_gcc_order(__success),
+ std::__to_gcc_order(__failure));
+ }
}
- _LIBCPP_HIDE_FROM_ABI bool
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
- return __compare_exchange(
- __ptr_,
- std::addressof(__expected),
- std::addressof(__desired),
- true,
- std::__to_gcc_order(__order),
- std::__to_gcc_failure_order(__order));
- }
- _LIBCPP_HIDE_FROM_ABI bool
+ if consteval {
+ if (*__ptr_ == __expected) {
+ __expected = *__ptr_;
+ *__ptr_ = __desired;
+ return true;
+ } else {
+ __expected = *__ptr_;
+ return false;
+ }
+ } else {
+ return __compare_exchange(
+ __ptr_,
+ std::addressof(__expected),
+ std::addressof(__desired),
+ true,
+ std::__to_gcc_order(__order),
+ std::__to_gcc_failure_order(__order));
+ }
+ }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool
compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
- return __compare_exchange(
- __ptr_,
- std::addressof(__expected),
- std::addressof(__desired),
- false,
- std::__to_gcc_order(__order),
- std::__to_gcc_failure_order(__order));
+ if consteval {
+ if (*__ptr_ == __expected) {
+ __expected = *__ptr_;
+ *__ptr_ = __desired;
+ return true;
+ } else {
+ __expected = *__ptr_;
+ return false;
+ }
+ } else {
+ return __compare_exchange(
+ __ptr_,
+ std::addressof(__expected),
+ std::addressof(__desired),
+ false,
+ std::__to_gcc_order(__order),
+ std::__to_gcc_failure_order(__order));
+ }
}
- _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+ wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
_LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
- _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
- __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
- __order == memory_order::seq_cst,
- "atomic_ref: memory order argument to atomic wait operation is invalid");
- std::__atomic_wait(*this, __old, __order);
+ if consteval {
+ if (*__ptr_ != __old) {
+ __builtin_trap();
+ }
+ } else {
+ _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
+ __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
+ __order == memory_order::seq_cst,
+ "atomic_ref: memory order argument to atomic wait operation is invalid");
+ std::__atomic_wait(*this, __old, __order);
+ }
+ }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void notify_one() const noexcept {
+ if !consteval {
+ std::__atomic_notify_one(*this);
+ }
+ }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void notify_all() const noexcept {
+ if !consteval {
+ std::__atomic_notify_all(*this);
+ }
}
- _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { std::__atomic_notify_one(*this); }
- _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { std::__atomic_notify_all(*this); }
};
template <class _Tp>
struct __atomic_waitable_traits<__atomic_ref_base<_Tp>> {
- static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
+ static _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
return __a.load(__order);
}
- static _LIBCPP_HIDE_FROM_ABI const _Tp* __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
+ static _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR const _Tp*
+ __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
return __a.__ptr_;
}
};
@@ -223,15 +304,21 @@ struct atomic_ref : public __atomic_ref_base<_Tp> {
using __base = __atomic_ref_base<_Tp>;
- _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit atomic_ref(_Tp& __obj) : __base(__obj) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
+# if __has_builtin(__builtin_is_aligned)
+ __builtin_is_aligned(std::addressof(__obj), __base::required_alignment),
+# else
reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
+# endif
"atomic_ref ctor: referenced object must be aligned to required_alignment");
}
- _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic_ref(const atomic_ref&) noexcept = default;
- _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator=(_Tp __desired) const noexcept {
+ return __base::operator=(__desired);
+ }
atomic_ref& operator=(const atomic_ref&) = delete;
};
@@ -243,43 +330,54 @@ struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
using difference_type = __base::value_type;
- _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit atomic_ref(_Tp& __obj) : __base(__obj) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
+# if __has_builtin(__builtin_is_aligned)
+ __builtin_is_aligned(std::addressof(__obj), __base::required_alignment),
+# else
reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
+# endif
"atomic_ref ctor: referenced object must be aligned to required_alignment");
}
- _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic_ref(const atomic_ref&) noexcept = default;
- _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator=(_Tp __desired) const noexcept {
+ return __base::operator=(__desired);
+ }
atomic_ref& operator=(const atomic_ref&) = delete;
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_add(this->__ptr_, __arg, std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_sub(this->__ptr_, __arg, std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_and(this->__ptr_, __arg, std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_or(this->__ptr_, __arg, std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_xor(this->__ptr_, __arg, std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
};
template <class _Tp>
@@ -289,19 +387,26 @@ struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
using difference_type = __base::value_type;
- _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit atomic_ref(_Tp& __obj) : __base(__obj) {
_LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
+# if __has_builtin(__builtin_is_aligned)
+ __builtin_is_aligned(std::addressof(__obj), __base::required_alignment),
+# else
reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
+# endif
"atomic_ref ctor: referenced object must be aligned to required_alignment");
}
- _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic_ref(const atomic_ref&) noexcept = default;
- _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator=(_Tp __desired) const noexcept {
+ return __base::operator=(__desired);
+ }
atomic_ref& operator=(const atomic_ref&) = delete;
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
_Tp __old = this->load(memory_order_relaxed);
_Tp __new = __old + __arg;
while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
@@ -309,7 +414,8 @@ struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
}
return __old;
}
- _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+ fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
_Tp __old = this->load(memory_order_relaxed);
_Tp __new = __old - __arg;
while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
@@ -318,8 +424,8 @@ struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
return __old;
}
- _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};
template <class _Tp>
@@ -328,25 +434,33 @@ struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
using difference_type = ptrdiff_t;
- _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}
- _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator=(_Tp* __desired) const noexcept {
+ return __base::operator=(__desired);
+ }
atomic_ref& operator=(const atomic_ref&) = delete;
- _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
+ fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
+ fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
}
- _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
- _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator++(int) const noexcept { return fetch_add(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator--(int) const noexcept { return fetch_sub(1); }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator+=(ptrdiff_t __arg) const noexcept {
+ return fetch_add(__arg) + __arg;
+ }
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp* operator-=(ptrdiff_t __arg) const noexcept {
+ return fetch_sub(__arg) - __arg;
+ }
};
_LIBCPP_CTAD_SUPPORTED_FOR_TYPE(atomic_ref);
diff --git a/libcxx/include/__atomic/cxx_atomic_impl.h b/libcxx/include/__atomic/cxx_atomic_impl.h
index 18e88aa97bec7..71087081c2237 100644
--- a/libcxx/include/__atomic/cxx_atomic_impl.h
+++ b/libcxx/include/__atomic/cxx_atomic_impl.h
@@ -30,7 +30,7 @@ _LIBCPP_BEGIN_NAMESPACE_STD
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
-_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
__a_value = __val;
}
template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0>
@@ -44,7 +44,7 @@ _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp volatile& __a_value,
template <typename _Tp>
struct __cxx_atomic_base_impl {
- _LIBCPP_HIDE_FROM_ABI
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR
# ifndef _LIBCPP_CXX03_LANG
__cxx_atomic_base_impl() _NOEXCEPT = default;
# else
@@ -61,15 +61,15 @@ _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
__a->__a_value = __val;
}
-_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR inline void __cxx_atomic_thread_fence(memory_order __order) {
__atomic_thread_fence(__to_gcc_order(__order));
}
-_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR inline void __cxx_atomic_signal_fence(memory_order __order) {
__atomic_signal_fence(__to_gcc_order(__order));
}
@@ -80,7 +80,8 @@ __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
+__cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) {
__atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
}
@@ -98,13 +99,14 @@ __cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp*
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI void
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
__cxx_atomic_load_inplace(const __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
__atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
_Tp __ret;
__atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
return __ret;
@@ -120,7 +122,8 @@ __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __value, me
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) {
_Tp __ret;
__atomic_exchange(
std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
@@ -144,7 +147,7 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool __cxx_atomic_compare_exchange_strong(
__cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
return __atomic_compare_exchange(
std::addressof(__a->__a_value),
@@ -172,7 +175,7 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool __cxx_atomic_compare_exchange_weak(
__cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) {
return __atomic_compare_exchange(
std::addressof(__a->__a_value),
@@ -207,7 +210,8 @@ __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, m
}
template <typename _Tp, typename _Td>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}
@@ -218,7 +222,8 @@ __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a, _Td __delta, m
}
template <typename _Tp, typename _Td>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta, memory_order __order) {
return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
}
@@ -229,7 +234,7 @@ __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}
@@ -241,7 +246,8 @@ __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}
@@ -252,7 +258,7 @@ __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
}
template <typename _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) {
return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
}
@@ -263,7 +269,7 @@ __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_o
template <typename _Tp>
struct __cxx_atomic_base_impl {
- _LIBCPP_HIDE_FROM_ABI
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR
# ifndef _LIBCPP_CXX03_LANG
__cxx_atomic_base_impl() _NOEXCEPT = default;
# else
@@ -276,11 +282,11 @@ struct __cxx_atomic_base_impl {
# define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
-_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR inline void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
__c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}
-_LIBCPP_HIDE_FROM_ABI inline void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR inline void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
__c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
@@ -289,7 +295,7 @@ _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatil
__c11_atomic_init(std::addressof(__a->__a_value), __val);
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) _NOEXCEPT {
__c11_atomic_init(std::addressof(__a->__a_value), __val);
}
@@ -299,7 +305,7 @@ __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_
__c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI void
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
__cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) _NOEXCEPT {
__c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
}
@@ -312,7 +318,8 @@ __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order
const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
+__cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
return __c11_atomic_load(
const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
@@ -326,7 +333,7 @@ __cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const volatile* __a, _Tp*
const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI void
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR void
__cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const* __a, _Tp* __dst, memory_order __order) _NOEXCEPT {
using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
*__dst = __c11_atomic_load(
@@ -340,13 +347,13 @@ __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, me
std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value, memory_order __order) _NOEXCEPT {
return __c11_atomic_exchange(
std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
}
-_LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR inline memory_order __to_failure_order(memory_order __order) {
// Avoid switch statement to make this a constexpr.
return __order == memory_order_release
? memory_order_relaxed
@@ -368,7 +375,7 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool __cxx_atomic_compare_exchange_strong(
__cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure)
_NOEXCEPT {
return __c11_atomic_compare_exchange_strong(
@@ -394,7 +401,7 @@ _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR bool __cxx_atomic_compare_exchange_weak(
__cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure)
_NOEXCEPT {
return __c11_atomic_compare_exchange_weak(
@@ -412,7 +419,7 @@ __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, m
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_add(
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
@@ -425,7 +432,7 @@ __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __d
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp*
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
__cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_add(
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
@@ -438,7 +445,7 @@ __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, m
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_sub(
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
@@ -450,7 +457,7 @@ __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __d
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp*
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp*
__cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_sub(
std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
@@ -463,7 +470,7 @@ __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern,
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_and(
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
@@ -476,7 +483,7 @@ __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern,
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_or(
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
@@ -489,7 +496,7 @@ __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern,
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template <class _Tp>
-_LIBCPP_HIDE_FROM_ABI _Tp
+_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR _Tp
__cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
return __c11_atomic_fetch_xor(
std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
@@ -501,7 +508,7 @@ template <typename _Tp, typename _Base = __cxx_atomic_base_impl<_Tp> >
struct __cxx_atomic_impl : public _Base {
static_assert(is_trivially_copyable<_Tp>::value, "std::atomic<T> requires that 'T' be a trivially copyable type");
- _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
+ _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __cxx_atomic_impl() _NOEXCEPT = default;
_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT : _Base(__value) {}
};
More information about the cfe-commits
mailing list