From 4f769a9207a05e14042d86ac13f406e281e8c1be Mon Sep 17 00:00:00 2001
From: _Redstone_c_
Date: Tue, 12 Apr 2022 08:33:19 +0800
Subject: [PATCH] feat(templates): add TAtomic and the corresponding testing

---
 .../Private/Testing/TemplatesTesting.cpp      | 111 +++++++
 .../Source/Public/Templates/Atomic.h          | 280 ++++++++++++++++++
 .../Source/Public/Templates/Templates.h       |   1 +
 .../Source/Public/Testing/TemplatesTesting.h  |   1 +
 4 files changed, 393 insertions(+)
 create mode 100644 Redcraft.Utility/Source/Public/Templates/Atomic.h

diff --git a/Redcraft.Utility/Source/Private/Testing/TemplatesTesting.cpp b/Redcraft.Utility/Source/Private/Testing/TemplatesTesting.cpp
index dd5ee8f..1e3e474 100644
--- a/Redcraft.Utility/Source/Private/Testing/TemplatesTesting.cpp
+++ b/Redcraft.Utility/Source/Private/Testing/TemplatesTesting.cpp
@@ -23,6 +23,7 @@ void TestTemplates()
 	TestAny();
 	TestTuple();
 	TestFunction();
+	TestAtomic();
 	TestMiscTemplates();
 }
 
@@ -1241,6 +1242,116 @@ void TestFunction()
 	}
 }
 
+// Single-threaded smoke test of TAtomic / TAtomicRef / FAtomicFlag.
+// Each always_check chains off the value left by the previous operation.
+void TestAtomic()
+{
+	{
+		TAtomic<int32> TempA;
+
+		always_check(TempA.bIsAlwaysLockFree);
+		always_check(TempA.IsLockFree());
+		always_check((TempA = 11) == 11);
+		TempA.Store(12);
+		always_check(TempA.Load() == 12);
+		always_check((int32)TempA == 12);
+		always_check(TempA.Exchange(13) == 12);
+		int32 TempB = 13;
+		always_check(TempA.CompareExchange(TempB, 15) == true);  // 13 -> 15
+		always_check(TempA.CompareExchange(TempB, 15) == false); // expected updated to 15
+		always_check(TempA.CompareExchange(TempB, 15) == true);
+		TempA.Wait(13); // returns immediately: value is 15, not 13
+		TempA.Notify();
+		always_check(TempA.FetchAdd(1) == 15);
+		always_check(TempA.FetchSub(1) == 16);
+		always_check(TempA.FetchMul(3) == 15);
+		always_check(TempA.FetchDiv(3) == 45);
+		always_check(TempA.FetchMod(16) == 15);
+		always_check(TempA.FetchAnd(0xFF) == 15);
+		always_check(TempA.FetchOr(0xFFFF) == 0xF);
+		always_check(TempA.FetchXor(0xFF) == 0xFFFF);
+		always_check(TempA.FetchLsh(4) == 0xFF00);
+		always_check(TempA.FetchRsh(4) == 0xFF000);
+		always_check(++TempA == 0xFF01);
+		always_check(TempA++ == 0xFF01);
+		always_check(--TempA == 0xFF01);
+		always_check(TempA-- == 0xFF01);
+		always_check((TempA += 1) == 0xFF01);
+		always_check((TempA -= 1) == 0xFF00);
+		always_check((TempA *= 16) == 0xFF000);
+		always_check((TempA /= 16) == 0xFF00);
+		always_check((TempA %= 0x1000) == 0xF00);
+		always_check((TempA &= 1) == 0x0);
+		always_check((TempA |= 1) == 0x1);
+		always_check((TempA ^= 0xF) == 0xE);
+		always_check((TempA <<= 4) == 0xE0);
+		always_check((TempA >>= 4) == 0xE);
+	}
+
+	{
+		int32 A;
+		TAtomicRef<int32> TempA(A); // same sequence as above, through an atomic_ref
+
+		always_check(TempA.bIsAlwaysLockFree);
+		always_check(TempA.IsLockFree());
+		always_check((TempA = 11) == 11);
+		TempA.Store(12);
+		always_check(TempA.Load() == 12);
+		always_check((int32)TempA == 12);
+		always_check(TempA.Exchange(13) == 12);
+		int32 TempB = 13;
+		always_check(TempA.CompareExchange(TempB, 15) == true);
+		always_check(TempA.CompareExchange(TempB, 15) == false);
+		always_check(TempA.CompareExchange(TempB, 15) == true);
+		TempA.Wait(13);
+		TempA.Notify();
+		always_check(TempA.FetchAdd(1) == 15);
+		always_check(TempA.FetchSub(1) == 16);
+		always_check(TempA.FetchMul(3) == 15);
+		always_check(TempA.FetchDiv(3) == 45);
+		always_check(TempA.FetchMod(16) == 15);
+		always_check(TempA.FetchAnd(0xFF) == 15);
+		always_check(TempA.FetchOr(0xFFFF) == 0xF);
+		always_check(TempA.FetchXor(0xFF) == 0xFFFF);
+		always_check(TempA.FetchLsh(4) == 0xFF00);
+		always_check(TempA.FetchRsh(4) == 0xFF000);
+		always_check(++TempA == 0xFF01);
+		always_check(TempA++ == 0xFF01);
+		always_check(--TempA == 0xFF01);
+		always_check(TempA-- == 0xFF01);
+		always_check((TempA += 1) == 0xFF01);
+		always_check((TempA -= 1) == 0xFF00);
+		always_check((TempA *= 16) == 0xFF000);
+		always_check((TempA /= 16) == 0xFF00);
+		always_check((TempA %= 0x1000) == 0xF00);
+		always_check((TempA &= 1) == 0x0);
+		always_check((TempA |= 1) == 0x1);
+		always_check((TempA ^= 0xF) == 0xE);
+		always_check((TempA <<= 4) == 0xE0);
+		always_check((TempA >>= 4) == 0xE);
+	}
+
+	{
+		FAtomicFlag Flag;
+
+		always_check(Flag.TestAndSet() == false);
+		always_check(Flag.Test() == true);
+		Flag.Clear();
+		always_check(Flag.Test() == false);
+		Flag.Wait(true); // returns immediately: flag is false
+		Flag.Notify();
+	}
+
+	{
+		int32 TempA = 10;
+		int32 TempB = KillDependency(TempA);
+		always_check(TempB == 10);
+	}
+
+	{
+		AtomicThreadFence();
+		AtomicSignalFence();
+	}
+}
+
 NAMESPACE_UNNAMED_BEGIN
 
diff --git a/Redcraft.Utility/Source/Public/Templates/Atomic.h b/Redcraft.Utility/Source/Public/Templates/Atomic.h
new file mode 100644
index 0000000..832ed2a
--- /dev/null
+++ b/Redcraft.Utility/Source/Public/Templates/Atomic.h
@@ -0,0 +1,280 @@
+#pragma once
+
+#include "CoreTypes.h"
+#include "Templates/Invoke.h"
+#include "Memory/Alignment.h"
+#include "Templates/Function.h"
+#include "TypeTraits/TypeTraits.h"
+#include "Templates/Noncopyable.h"
+
+#include <atomic>
+
+NAMESPACE_REDCRAFT_BEGIN
+NAMESPACE_MODULE_BEGIN(Redcraft)
+NAMESPACE_MODULE_BEGIN(Utility)
+
+// Mirrors std::memory_order; numeric values are identical so a static_cast is safe.
+enum class EMemoryOrder : uint8
+{
+	Relaxed                = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_relaxed),
+	Consume                = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_consume),
+	Acquire                = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_acquire),
+	Release                = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_release),
+	AcquireRelease         = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_acq_rel),
+	SequentiallyConsistent = static_cast<TUnderlyingType<NAMESPACE_STD::memory_order>::Type>(NAMESPACE_STD::memory_order_seq_cst),
+};
+
+#if BUILD_DEBUG
+
+NAMESPACE_PRIVATE_BEGIN
+
+// Debug-only validation that an operation got a memory order it supports.
+// Bit mask: 0x01 Relaxed, 0x02 Consume, 0x04 Acquire, 0x08 Release, 0x10 AcquireRelease, 0x20 SequentiallyConsistent.
+FORCEINLINE void MemoryOrderCheck(EMemoryOrder Order, uint8 Require)
+{
+	switch (Order)
+	{
+	case EMemoryOrder::Relaxed:                checkf((Require) & 0x01, "Invalid memory order."); break;
+	case EMemoryOrder::Consume:                checkf((Require) & 0x02, "Invalid memory order."); break;
+	case EMemoryOrder::Acquire:                checkf((Require) & 0x04, "Invalid memory order."); break;
+	case EMemoryOrder::Release:                checkf((Require) & 0x08, "Invalid memory order."); break;
+	case EMemoryOrder::AcquireRelease:         checkf((Require) & 0x10, "Invalid memory order."); break;
+	case EMemoryOrder::SequentiallyConsistent: checkf((Require) & 0x20, "Invalid memory order."); break;
+	default: check_no_entry();
+	}
+}
+
+NAMESPACE_PRIVATE_END
+
+#define MEMORY_ORDER_CHECK(Order, Require) NAMESPACE_PRIVATE::MemoryOrderCheck(Order, Require)
+
+#else
+
+#define MEMORY_ORDER_CHECK(Order, Require)
+
+#endif
+
+// Wrapper over std::atomic (bIsRef == false) or std::atomic_ref (bIsRef == true).
+// Use TAtomic<T> for an owning atomic and TAtomicRef<T> to atomically access external storage.
+template <typename T, bool bIsRef = false> requires TIsTriviallyCopyable<T>::Value
+	&& TIsCopyConstructible<T>::Value && TIsMoveConstructible<T>::Value
+	&& TIsCopyAssignable<T>::Value && TIsMoveAssignable<T>::Value
+struct TAtomic : public FSingleton
+{
+protected:
+
+	using ElementType = typename TConditional<bIsRef, NAMESPACE_STD::atomic_ref<T>, NAMESPACE_STD::atomic<T>>::Type;
+
+public:
+
+	using ValueType = T;
+
+	static constexpr bool bIsAlwaysLockFree = ElementType::is_always_lock_free;
+
+	// Alignment the referenced object must satisfy in the bIsRef case.
+	static constexpr size_t RequiredAlignment = NAMESPACE_STD::atomic_ref<T>::required_alignment;
+
+	constexpr TAtomic() requires (!bIsRef) : Element() { };
+	constexpr TAtomic(ValueType Desired) requires (!bIsRef) : Element(Desired) { };
+
+	FORCEINLINE explicit TAtomic(ValueType& Desired) requires (bIsRef) : Element(Desired) { check(Memory::IsAligned(&Desired, RequiredAlignment)); };
+	FORCEINLINE TAtomic(TAtomic& InValue) requires (bIsRef) : Element(InValue.Element) { }; // copy the atomic_ref, not a converted value
+
+	FORCEINLINE ValueType operator=(ValueType Desired)                                           { return Element = Desired; }
+	FORCEINLINE ValueType operator=(ValueType Desired) volatile requires bIsAlwaysLockFree       { return Element = Desired; }
+
+	FORCEINLINE bool IsLockFree() const          { return Element.is_lock_free(); }
+	FORCEINLINE bool IsLockFree() const volatile { return Element.is_lock_free(); }
+
+	FORCEINLINE void Store(ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)                                     { MEMORY_ORDER_CHECK(Order, 0x01 | 0x08 | 0x20); Element.store(Desired, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE void Store(ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires bIsAlwaysLockFree { MEMORY_ORDER_CHECK(Order, 0x01 | 0x08 | 0x20); Element.store(Desired, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType Load(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const                                     { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); return Element.load(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType Load(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const volatile requires bIsAlwaysLockFree { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); return Element.load(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE operator ValueType() const                                     { return static_cast<ValueType>(Element); }
+	FORCEINLINE operator ValueType() const volatile requires bIsAlwaysLockFree { return static_cast<ValueType>(Element); }
+
+	FORCEINLINE ValueType Exchange(ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)                                     { return Element.exchange(Desired, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType Exchange(ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires bIsAlwaysLockFree { return Element.exchange(Desired, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	// On failure, Expected is updated with the observed value (std::atomic semantics).
+	FORCEINLINE bool CompareExchange(ValueType& Expected, ValueType Desired, EMemoryOrder Success, EMemoryOrder Failure, bool bIsWeak = false)
+	{
+		MEMORY_ORDER_CHECK(Failure, 0x01 | 0x02 | 0x04 | 0x20);
+		if (bIsWeak) return Element.compare_exchange_weak(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Success), static_cast<NAMESPACE_STD::memory_order>(Failure));
+		else         return Element.compare_exchange_strong(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Success), static_cast<NAMESPACE_STD::memory_order>(Failure));
+	}
+
+	FORCEINLINE bool CompareExchange(ValueType& Expected, ValueType Desired, EMemoryOrder Success, EMemoryOrder Failure, bool bIsWeak = false) volatile requires bIsAlwaysLockFree
+	{
+		MEMORY_ORDER_CHECK(Failure, 0x01 | 0x02 | 0x04 | 0x20);
+		if (bIsWeak) return Element.compare_exchange_weak(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Success), static_cast<NAMESPACE_STD::memory_order>(Failure));
+		else         return Element.compare_exchange_strong(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Success), static_cast<NAMESPACE_STD::memory_order>(Failure));
+	}
+
+	FORCEINLINE bool CompareExchange(ValueType& Expected, ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent, bool bIsWeak = false)
+	{
+		if (bIsWeak) return Element.compare_exchange_weak(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Order));
+		else         return Element.compare_exchange_strong(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Order));
+	}
+
+	FORCEINLINE bool CompareExchange(ValueType& Expected, ValueType Desired, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent, bool bIsWeak = false) volatile requires bIsAlwaysLockFree
+	{
+		if (bIsWeak) return Element.compare_exchange_weak(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Order));
+		else         return Element.compare_exchange_strong(Expected, Desired, static_cast<NAMESPACE_STD::memory_order>(Order));
+	}
+
+	FORCEINLINE void Wait(ValueType Old, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); Element.wait(Old, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE void Wait(ValueType Old, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); Element.wait(Old, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE void Notify(bool bIsAll = false)          { if (bIsAll) Element.notify_all(); else Element.notify_one(); }
+	FORCEINLINE void Notify(bool bIsAll = false) volatile { if (bIsAll) Element.notify_all(); else Element.notify_one(); }
+
+	// Atomically replaces the value with Func(Old) via a CAS loop; returns the old value.
+	template <typename F> requires TIsInvocableResult<ValueType, F, ValueType>::Value
+	FORCEINLINE ValueType FetchFn(F&& Func, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)
+	{
+		ValueType Temp(Load(EMemoryOrder::Relaxed));
+		while (!CompareExchange(Temp, InvokeResult<ValueType>(Forward<F>(Func), Temp), Order));
+		return Temp;
+	}
+
+	template <typename F> requires TIsInvocableResult<ValueType, F, ValueType>::Value && bIsAlwaysLockFree
+	FORCEINLINE ValueType FetchFn(F&& Func, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile
+	{
+		ValueType Temp(Load(EMemoryOrder::Relaxed));
+		while (!CompareExchange(Temp, InvokeResult<ValueType>(Forward<F>(Func), Temp), Order));
+		return Temp;
+	}
+
+	FORCEINLINE ValueType FetchAdd(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return Element.fetch_add(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchAdd(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return Element.fetch_add(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchAdd(ptrdiff InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsPointer<T>::Value                      { return Element.fetch_add(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchAdd(ptrdiff InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsPointer<T>::Value && bIsAlwaysLockFree { return Element.fetch_add(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchSub(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return Element.fetch_sub(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchSub(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return Element.fetch_sub(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchSub(ptrdiff InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsPointer<T>::Value                      { return Element.fetch_sub(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchSub(ptrdiff InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsPointer<T>::Value && bIsAlwaysLockFree { return Element.fetch_sub(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchMul(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return FetchFn([InValue](ValueType Old) -> ValueType { return Old * InValue; }, Order); } // forward Order, do not silently use seq_cst
+	FORCEINLINE ValueType FetchMul(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return FetchFn([InValue](ValueType Old) -> ValueType { return Old * InValue; }, Order); }
+
+	FORCEINLINE ValueType FetchDiv(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return FetchFn([InValue](ValueType Old) -> ValueType { return Old / InValue; }, Order); }
+	FORCEINLINE ValueType FetchDiv(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return FetchFn([InValue](ValueType Old) -> ValueType { return Old / InValue; }, Order); }
+
+	FORCEINLINE ValueType FetchMod(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return FetchFn([InValue](ValueType Old) -> ValueType { return Old % InValue; }, Order); }
+	FORCEINLINE ValueType FetchMod(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchFn([InValue](ValueType Old) -> ValueType { return Old % InValue; }, Order); }
+
+	FORCEINLINE ValueType FetchAnd(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return Element.fetch_and(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchAnd(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element.fetch_and(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchOr(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return Element.fetch_or(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchOr(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element.fetch_or(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchXor(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return Element.fetch_xor(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE ValueType FetchXor(ValueType InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element.fetch_xor(InValue, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE ValueType FetchLsh(size_t InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return FetchFn([InValue](ValueType Old) -> ValueType { return Old << InValue; }, Order); }
+	FORCEINLINE ValueType FetchLsh(size_t InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchFn([InValue](ValueType Old) -> ValueType { return Old << InValue; }, Order); }
+
+	FORCEINLINE ValueType FetchRsh(size_t InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          requires TIsIntegral<T>::Value                      { return FetchFn([InValue](ValueType Old) -> ValueType { return Old >> InValue; }, Order); }
+	FORCEINLINE ValueType FetchRsh(size_t InValue, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchFn([InValue](ValueType Old) -> ValueType { return Old >> InValue; }, Order); }
+
+	FORCEINLINE ValueType operator++()          requires (TIsIntegral<T>::Value || TIsPointer<T>::Value)                      { return ++Element; }
+	FORCEINLINE ValueType operator++() volatile requires (TIsIntegral<T>::Value || TIsPointer<T>::Value) && bIsAlwaysLockFree { return ++Element; }
+
+	FORCEINLINE ValueType operator++(int)          requires (TIsIntegral<T>::Value || TIsPointer<T>::Value)                      { return Element++; }
+	FORCEINLINE ValueType operator++(int) volatile requires (TIsIntegral<T>::Value || TIsPointer<T>::Value) && bIsAlwaysLockFree { return Element++; }
+
+	FORCEINLINE ValueType operator--()          requires (TIsIntegral<T>::Value || TIsPointer<T>::Value)                      { return --Element; }
+	FORCEINLINE ValueType operator--() volatile requires (TIsIntegral<T>::Value || TIsPointer<T>::Value) && bIsAlwaysLockFree { return --Element; }
+
+	FORCEINLINE ValueType operator--(int)          requires (TIsIntegral<T>::Value || TIsPointer<T>::Value)                      { return Element--; }
+	FORCEINLINE ValueType operator--(int) volatile requires (TIsIntegral<T>::Value || TIsPointer<T>::Value) && bIsAlwaysLockFree { return Element--; }
+
+	FORCEINLINE ValueType operator+=(ValueType InValue)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return Element += InValue; }
+	FORCEINLINE ValueType operator+=(ValueType InValue) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return Element += InValue; }
+
+	FORCEINLINE ValueType operator+=(ptrdiff InValue)          requires TIsPointer<T>::Value                      { return Element += InValue; }
+	FORCEINLINE ValueType operator+=(ptrdiff InValue) volatile requires TIsPointer<T>::Value && bIsAlwaysLockFree { return Element += InValue; }
+
+	FORCEINLINE ValueType operator-=(ValueType InValue)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return Element -= InValue; }
+	FORCEINLINE ValueType operator-=(ValueType InValue) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return Element -= InValue; }
+
+	FORCEINLINE ValueType operator-=(ptrdiff InValue)          requires TIsPointer<T>::Value                      { return Element -= InValue; }
+	FORCEINLINE ValueType operator-=(ptrdiff InValue) volatile requires TIsPointer<T>::Value && bIsAlwaysLockFree { return Element -= InValue; }
+
+	FORCEINLINE ValueType operator*=(ValueType InValue)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return FetchMul(InValue) * InValue; }
+	FORCEINLINE ValueType operator*=(ValueType InValue) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return FetchMul(InValue) * InValue; }
+
+	FORCEINLINE ValueType operator/=(ValueType InValue)          requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value)                      { return FetchDiv(InValue) / InValue; }
+	FORCEINLINE ValueType operator/=(ValueType InValue) volatile requires (TIsIntegral<T>::Value || TIsFloatingPoint<T>::Value) && bIsAlwaysLockFree { return FetchDiv(InValue) / InValue; }
+
+	FORCEINLINE ValueType operator%=(ValueType InValue)          requires TIsIntegral<T>::Value                      { return FetchMod(InValue) % InValue; }
+	FORCEINLINE ValueType operator%=(ValueType InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchMod(InValue) % InValue; }
+
+	FORCEINLINE ValueType operator&=(ValueType InValue)          requires TIsIntegral<T>::Value                      { return Element &= InValue; }
+	FORCEINLINE ValueType operator&=(ValueType InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element &= InValue; }
+
+	FORCEINLINE ValueType operator|=(ValueType InValue)          requires TIsIntegral<T>::Value                      { return Element |= InValue; }
+	FORCEINLINE ValueType operator|=(ValueType InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element |= InValue; }
+
+	FORCEINLINE ValueType operator^=(ValueType InValue)          requires TIsIntegral<T>::Value                      { return Element ^= InValue; }
+	FORCEINLINE ValueType operator^=(ValueType InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return Element ^= InValue; }
+
+	FORCEINLINE ValueType operator<<=(size_t InValue)          requires TIsIntegral<T>::Value                      { return FetchLsh(InValue) << InValue; }
+	FORCEINLINE ValueType operator<<=(size_t InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchLsh(InValue) << InValue; }
+
+	FORCEINLINE ValueType operator>>=(size_t InValue)          requires TIsIntegral<T>::Value                      { return FetchRsh(InValue) >> InValue; }
+	FORCEINLINE ValueType operator>>=(size_t InValue) volatile requires TIsIntegral<T>::Value && bIsAlwaysLockFree { return FetchRsh(InValue) >> InValue; }
+
+protected:
+
+	ElementType Element;
+
+};
+
+template <typename T>
+using TAtomicRef = TAtomic<T, true>;
+
+template <typename T>
+TAtomic(T) -> TAtomic<T>;
+
+// Wrapper over std::atomic_flag.
+struct FAtomicFlag : public FSingleton
+{
+public:
+
+	constexpr FAtomicFlag() : Element() { };
+
+	FORCEINLINE void Clear(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          { MEMORY_ORDER_CHECK(Order, 0x01 | 0x08 | 0x20); Element.clear(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE void Clear(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile { MEMORY_ORDER_CHECK(Order, 0x01 | 0x08 | 0x20); Element.clear(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE bool TestAndSet(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent)          { return Element.test_and_set(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE bool TestAndSet(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) volatile { return Element.test_and_set(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE bool Test(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const          { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); return Element.test(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE bool Test(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const volatile { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); return Element.test(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE void Wait(bool Old, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const          { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); const_cast<NAMESPACE_STD::atomic_flag&>(Element).wait(Old, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+	FORCEINLINE void Wait(bool Old, EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) const volatile { MEMORY_ORDER_CHECK(Order, 0x01 | 0x02 | 0x04 | 0x20); const_cast<volatile NAMESPACE_STD::atomic_flag&>(Element).wait(Old, static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+	FORCEINLINE void Notify(bool bIsAll = false)          { if (bIsAll) Element.notify_all(); else Element.notify_one(); }
+	FORCEINLINE void Notify(bool bIsAll = false) volatile { if (bIsAll) Element.notify_all(); else Element.notify_one(); }
+
+protected:
+
+	NAMESPACE_STD::atomic_flag Element;
+
+};
+
+// Equivalent of std::kill_dependency: returns a copy that carries no dependency.
+template <typename T>
+FORCEINLINE T KillDependency(T InValue)
+{
+	T Temp(InValue);
+	return Temp;
+}
+
+extern "C" FORCEINLINE void AtomicThreadFence(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) { NAMESPACE_STD::atomic_thread_fence(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+extern "C" FORCEINLINE void AtomicSignalFence(EMemoryOrder Order = EMemoryOrder::SequentiallyConsistent) { NAMESPACE_STD::atomic_signal_fence(static_cast<NAMESPACE_STD::memory_order>(Order)); }
+
+#undef MEMORY_ORDER_CHECK
+
+NAMESPACE_MODULE_END(Utility)
+NAMESPACE_MODULE_END(Redcraft)
+NAMESPACE_REDCRAFT_END
diff --git a/Redcraft.Utility/Source/Public/Templates/Templates.h b/Redcraft.Utility/Source/Public/Templates/Templates.h
index 57f58d4..b03fec6 100644
--- a/Redcraft.Utility/Source/Public/Templates/Templates.h
+++ b/Redcraft.Utility/Source/Public/Templates/Templates.h
@@ -13,3 +13,4 @@
 #include "Templates/Tuple.h"
 #include "Templates/TypeHash.h"
 #include "Templates/Function.h"
+#include "Templates/Atomic.h"
diff --git a/Redcraft.Utility/Source/Public/Testing/TemplatesTesting.h b/Redcraft.Utility/Source/Public/Testing/TemplatesTesting.h
index c92a87a..58d567a 100644
--- a/Redcraft.Utility/Source/Public/Testing/TemplatesTesting.h
+++ b/Redcraft.Utility/Source/Public/Testing/TemplatesTesting.h
@@ -16,6 +16,7 @@ REDCRAFTUTILITY_API void TestVariant();
 REDCRAFTUTILITY_API void TestAny();
 REDCRAFTUTILITY_API void TestTuple();
 REDCRAFTUTILITY_API void TestFunction();
+REDCRAFTUTILITY_API void TestAtomic();
 REDCRAFTUTILITY_API void TestMiscTemplates();
 
 NAMESPACE_END(Testing)