#ifndef ORIGINAL_ATOMIC_H
#define ORIGINAL_ATOMIC_H

#include <cstring>      // std::memset
#include <type_traits>  // std::is_trivially_copyable_v, std::is_trivially_destructible_v
#include <utility>      // std::move

#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG
// GCC/Clang backend: the __atomic_* built-ins are used directly.
#elif ORIGINAL_COMPILER_MSVC
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>    // Interlocked* intrinsics, LONG/LONG64
#endif

namespace original {
#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG

// Memory orderings, mapped directly onto the GCC/Clang __atomic built-in constants.
enum class memOrder {
    RELAXED = __ATOMIC_RELAXED,
    ACQUIRE = __ATOMIC_ACQUIRE,
    RELEASE = __ATOMIC_RELEASE,
    ACQ_REL = __ATOMIC_ACQ_REL,
    SEQ_CST = __ATOMIC_SEQ_CST,
};

template<typename TYPE, bool USE_LOCK>
class atomicImpl;

// atomic<TYPE> selects the lock-free specialization when TYPE is trivially copyable,
// trivially destructible and always lock-free on the target; otherwise it falls back
// to the mutex-based specialization.
template<typename TYPE>
using atomic = atomicImpl<TYPE,
    !( std::is_trivially_copyable_v<TYPE> &&
       std::is_trivially_destructible_v<TYPE> &&
       __atomic_always_lock_free(sizeof(TYPE), nullptr) )>;
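// Illustration (editorial addition, not part of the original header): under the selection
// rule above, a trivially copyable, always-lock-free type such as int resolves to the
// lock-free specialization on mainstream targets, while a type like std::string falls
// back to the locked one:
//
//     static_assert(std::is_same_v<atomic<int>, atomicImpl<int, false>>);
//     static_assert(std::is_same_v<atomic<std::string>, atomicImpl<std::string, true>>);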
// Lock-free specialization: the value lives in raw aligned storage and every operation
// maps onto a GCC/Clang __atomic built-in.
template<typename TYPE>
class atomicImpl<TYPE, false> {
    alignas(TYPE) byte data_[sizeof(TYPE)]{};

public:
    atomicImpl();
    explicit atomicImpl(TYPE value, memOrder order = SEQ_CST);
    static constexpr auto RELAXED = memOrder::RELAXED;
    static constexpr auto ACQUIRE = memOrder::ACQUIRE;
    static constexpr auto RELEASE = memOrder::RELEASE;
    static constexpr auto ACQ_REL = memOrder::ACQ_REL;
    static constexpr auto SEQ_CST = memOrder::SEQ_CST;
    atomicImpl(const atomicImpl&) = delete;
    atomicImpl(atomicImpl&&) = delete;
    atomicImpl& operator=(const atomicImpl&) = delete;
    atomicImpl& operator=(atomicImpl&&) = delete;

    static constexpr bool isLockFree() noexcept;

    void store(TYPE value, memOrder order = SEQ_CST);
    TYPE load(memOrder order = SEQ_CST) const noexcept;
    TYPE operator*() const noexcept;
    explicit operator TYPE() const noexcept;

    atomicImpl& operator=(TYPE value) noexcept;
    atomicImpl& operator+=(TYPE value) noexcept;
    atomicImpl& operator-=(TYPE value) noexcept;

    TYPE exchange(TYPE value, memOrder order = SEQ_CST) noexcept;
    bool exchangeCmp(TYPE& expected, TYPE desired, memOrder order = SEQ_CST) noexcept;

    ~atomicImpl() = default;
};
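// Usage sketch (editorial example, not part of the original interface): exercises the
// lock-free specialization through the public API declared above. The function name is
// hypothetical.
inline void atomicLockFreeUsageSketch() {
    atomic<int> counter{0};                    // explicit value constructor (SEQ_CST by default)
    counter.store(5);                          // plain atomic store
    counter += 3;                              // atomic fetch-add
    const int observed = counter.load();       // observed == 8
    const int previous = counter.exchange(42); // previous == 8
    int expected = 42;
    const bool swapped = counter.exchangeCmp(expected, 100); // swapped == true
    (void)observed; (void)previous; (void)swapped;           // silence unused warnings
}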
// Locked fallback: the value is kept in an alternative<TYPE> slot and every operation is
// serialized through a pMutex.
template<typename TYPE>
class atomicImpl<TYPE, true> {
    mutable pMutex mutex_;
    alternative<TYPE> data_;

public:
    atomicImpl() = default;
    explicit atomicImpl(TYPE value, memOrder order = RELEASE);

    static constexpr auto RELAXED = memOrder::RELAXED;
    static constexpr auto ACQUIRE = memOrder::ACQUIRE;
    static constexpr auto RELEASE = memOrder::RELEASE;
    static constexpr auto ACQ_REL = memOrder::ACQ_REL;
    static constexpr auto SEQ_CST = memOrder::SEQ_CST;

    atomicImpl(const atomicImpl&) = delete;
    atomicImpl(atomicImpl&&) = delete;
    atomicImpl& operator=(const atomicImpl&) = delete;
    atomicImpl& operator=(atomicImpl&&) = delete;

    static constexpr bool isLockFree() noexcept;

    void store(TYPE value, memOrder order = SEQ_CST);
    TYPE load(memOrder order = SEQ_CST) const noexcept;
    TYPE operator*() const noexcept;
    explicit operator TYPE() const noexcept;

    atomicImpl& operator=(TYPE value) noexcept;
    atomicImpl& operator+=(TYPE value) noexcept;
    atomicImpl& operator-=(TYPE value) noexcept;

    TYPE exchange(const TYPE& value, memOrder order = SEQ_CST) noexcept;
    bool exchangeCmp(TYPE& expected, const TYPE& desired, memOrder order = SEQ_CST) noexcept;

    ~atomicImpl() = default;
};
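// Usage sketch (editorial example, not part of the original interface): a type with a
// non-trivial destructor selects the locked specialization. The struct and function names
// are hypothetical, and alternative<TYPE> is assumed to default-construct to an empty slot.
struct trackedValue {
    int value = 0;
    ~trackedValue() {}   // non-trivial destructor, so atomic<trackedValue> uses the lock
    bool operator==(const trackedValue& other) const { return value == other.value; }
};

inline void atomicLockedUsageSketch() {
    atomic<trackedValue> slot;                        // default-constructed, empty slot
    slot.store(trackedValue{1});
    trackedValue expected{1};
    const bool swapped = slot.exchangeCmp(expected, trackedValue{2}); // compares via operator==
    const trackedValue now = slot.load();             // now.value == 2 when swapped
    (void)swapped; (void)now;                         // silence unused warnings
}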
#elif ORIGINAL_COMPILER_MSVC

// Memory orderings for the MSVC backend.
enum class memOrder {
    RELAXED,
    ACQUIRE,
    RELEASE,
    ACQ_REL,
    SEQ_CST,
};

// A type is handled lock-free when it matches an Interlocked operand width.
template<typename TYPE>
constexpr bool isLockFreeType() noexcept;

template<typename TYPE, bool USE_LOCK>
class atomicImpl;

template<typename TYPE>
using atomic = atomicImpl<TYPE, !isLockFreeType<TYPE>()>;
// Lock-free specialization for MSVC: operations are routed through the Interlocked
// intrinsics on a 32- or 64-bit operand.
template<typename TYPE>
class atomicImpl<TYPE, false> {
    using val_type = some_t<sizeof(TYPE) == 4, LONG, LONG64>;

    alignas(TYPE) volatile TYPE data_{};

    // Converts the stored value into the Interlocked operand type...
    template<typename From, typename To>
    static To atomicCastTo(From value);

    // ...and converts an Interlocked result back into the stored type.
    template<typename To, typename From>
    static To atomicCastBack(From value);

public:
    atomicImpl();
    explicit atomicImpl(TYPE value, memOrder order = SEQ_CST);

    static constexpr auto RELAXED = memOrder::RELAXED;
    static constexpr auto ACQUIRE = memOrder::ACQUIRE;
    static constexpr auto RELEASE = memOrder::RELEASE;
    static constexpr auto ACQ_REL = memOrder::ACQ_REL;
    static constexpr auto SEQ_CST = memOrder::SEQ_CST;

    atomicImpl(const atomicImpl&) = delete;
    atomicImpl(atomicImpl&&) = delete;
    atomicImpl& operator=(const atomicImpl&) = delete;
    atomicImpl& operator=(atomicImpl&&) = delete;

    static constexpr bool isLockFree() noexcept;

    void store(TYPE value, memOrder order = SEQ_CST);
    TYPE load(memOrder order = SEQ_CST) const noexcept;
    TYPE operator*() const noexcept;
    explicit operator TYPE() const noexcept;

    atomicImpl& operator=(TYPE value) noexcept;
    atomicImpl& operator+=(TYPE value) noexcept;
    atomicImpl& operator-=(TYPE value) noexcept;

    TYPE exchange(TYPE value, memOrder order = SEQ_CST) noexcept;
    bool exchangeCmp(TYPE& expected, TYPE desired, memOrder order = SEQ_CST) noexcept;

    ~atomicImpl() = default;
};
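// Illustration (editorial note, not part of the original header): on this backend the
// lock-free path is chosen purely by operand width, for example
//
//     atomic<int>        // sizeof == 4 -> atomicImpl<int, false>, Interlocked path
//     atomic<long long>  // sizeof == 8 -> atomicImpl<long long, false>
//     atomic<short>      // sizeof == 2 -> atomicImpl<short, true>, mutex fallback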
// Locked fallback for MSVC: the value is kept in an alternative<TYPE> slot and every
// operation is serialized through a wMutex.
template<typename TYPE>
class atomicImpl<TYPE, true> {
    mutable wMutex mutex_;
    alternative<TYPE> data_;

public:
    atomicImpl() = default;
    explicit atomicImpl(TYPE value, memOrder order = RELEASE);

    static constexpr auto RELAXED = memOrder::RELAXED;
    static constexpr auto ACQUIRE = memOrder::ACQUIRE;
    static constexpr auto RELEASE = memOrder::RELEASE;
    static constexpr auto ACQ_REL = memOrder::ACQ_REL;
    static constexpr auto SEQ_CST = memOrder::SEQ_CST;

    atomicImpl(const atomicImpl&) = delete;
    atomicImpl(atomicImpl&&) = delete;
    atomicImpl& operator=(const atomicImpl&) = delete;
    atomicImpl& operator=(atomicImpl&&) = delete;

    static constexpr bool isLockFree() noexcept;

    void store(TYPE value, memOrder order = SEQ_CST);
    TYPE load(memOrder order = SEQ_CST) const noexcept;
    TYPE operator*() const noexcept;
    explicit operator TYPE() const noexcept;

    atomicImpl& operator=(TYPE value) noexcept;
    atomicImpl& operator+=(TYPE value) noexcept;
    atomicImpl& operator-=(TYPE value) noexcept;

    TYPE exchange(const TYPE& value, memOrder order = SEQ_CST) noexcept;
    bool exchangeCmp(TYPE& expected, const TYPE& desired, memOrder order = SEQ_CST) noexcept;

    ~atomicImpl() = default;
};
#endif

// Factory helpers: makeAtomic() creates a default-constructed atomic object,
// makeAtomic(value) one initialized with the given value.
template<typename TYPE>
auto makeAtomic();

template<typename TYPE>
auto makeAtomic(TYPE value);
#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG

// Definitions for the GCC/Clang lock-free specialization.

// Default construction zero-fills the raw storage.
template <typename TYPE>
atomicImpl<TYPE, false>::atomicImpl() {
    std::memset(this->data_, byte{}, sizeof(TYPE));
}

template <typename TYPE>
atomicImpl<TYPE, false>::atomicImpl(TYPE value, memOrder order) {
    __atomic_store(reinterpret_cast<TYPE*>(this->data_), &value,
                   static_cast<integer>(order));
}

template <typename TYPE>
constexpr bool atomicImpl<TYPE, false>::isLockFree() noexcept {
    return true;
}

template <typename TYPE>
void atomicImpl<TYPE, false>::store(TYPE value, memOrder order) {
    __atomic_store(reinterpret_cast<TYPE*>(this->data_), &value,
                   static_cast<integer>(order));
}

template <typename TYPE>
TYPE atomicImpl<TYPE, false>::load(memOrder order) const noexcept {
    TYPE result;
    __atomic_load(reinterpret_cast<const TYPE*>(this->data_), &result,
                  static_cast<integer>(order));
    return result;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, false>::operator*() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, false>::operator TYPE() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator=(TYPE value) noexcept {
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator+=(TYPE value) noexcept {
    __atomic_fetch_add(reinterpret_cast<TYPE*>(this->data_), value,
                       static_cast<integer>(memOrder::SEQ_CST));
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator-=(TYPE value) noexcept {
    __atomic_fetch_sub(reinterpret_cast<TYPE*>(this->data_), value,
                       static_cast<integer>(memOrder::SEQ_CST));
    return *this;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, false>::exchange(TYPE value, memOrder order) noexcept {
    TYPE result;
    __atomic_exchange(reinterpret_cast<TYPE*>(this->data_), &value, &result,
                      static_cast<integer>(order));
    return result;
}

// On failure, __atomic_compare_exchange_n writes the currently stored value into `expected`.
template <typename TYPE>
bool atomicImpl<TYPE, false>::exchangeCmp(TYPE& expected, TYPE desired, memOrder order) noexcept {
    return __atomic_compare_exchange_n(reinterpret_cast<TYPE*>(this->data_),
                                       &expected, std::move(desired), false,
                                       static_cast<integer>(order),
                                       static_cast<integer>(order));
}
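// Illustration (editorial example, not from the original source): the usual retry loop
// built on exchangeCmp, here a saturating increment. On failure exchangeCmp refreshes
// `expected` with the currently stored value. The helper name is hypothetical.
inline bool saturatingIncrementSketch(atomic<int>& counter, const int limit) {
    int expected = counter.load();
    while (expected < limit) {
        if (counter.exchangeCmp(expected, expected + 1)) {
            return true;    // won the race and incremented
        }
        // `expected` now holds the value another thread installed; retry
    }
    return false;           // already at or above the limit
}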
// Definitions for the GCC/Clang locked fallback: every operation takes the mutex and
// delegates to the alternative<TYPE> slot; the memory-order argument is unused here.

template <typename TYPE>
atomicImpl<TYPE, true>::atomicImpl(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
constexpr bool atomicImpl<TYPE, true>::isLockFree() noexcept {
    return false;
}

template <typename TYPE>
void atomicImpl<TYPE, true>::store(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::load(memOrder) const noexcept {
    uniqueLock lock{this->mutex_};
    return *this->data_;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::operator*() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, true>::operator TYPE() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator=(TYPE value) noexcept {
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator+=(TYPE value) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ + value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator-=(TYPE value) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ - value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::exchange(const TYPE& value, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_;
    this->data_.set(value);
    return result;
}

template <typename TYPE>
bool atomicImpl<TYPE, true>::exchangeCmp(TYPE& expected, const TYPE& desired, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    if (*this->data_ == expected) {
        this->data_.set(desired);
        return true;
    }
    expected = *this->data_;
    return false;
}
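// Editorial note (not from the original source): the locked exchangeCmp above also
// refreshes `expected` on failure, so the same retry pattern applies to locked types, e.g.
//
//     trackedValue expected = slot.load();
//     while (!slot.exchangeCmp(expected, computeNext(expected))) {
//         // `expected` now holds the current value; recompute and retry
//     }
//
// where slot, trackedValue and computeNext are the hypothetical names used in the
// sketches above.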
#elif ORIGINAL_COMPILER_MSVC

// Definitions for the MSVC backend.

template<typename TYPE>
constexpr bool isLockFreeType() noexcept {
    return sizeof(TYPE) == 4 || sizeof(TYPE) == 8;
}

// Pointers are reinterpreted bit-for-bit into the Interlocked operand type,
// arithmetic values are converted.
template <typename TYPE>
template <typename From, typename To>
To atomicImpl<TYPE, false>::atomicCastTo(From value) {
    if constexpr (std::is_pointer_v<From>) {
        return reinterpret_cast<To>(value);
    } else {
        return static_cast<To>(value);
    }
}

// The inverse conversion, from an Interlocked result back to the stored type.
template <typename TYPE>
template <typename To, typename From>
To atomicImpl<TYPE, false>::atomicCastBack(From value) {
    if constexpr (std::is_pointer_v<To>) {
        return reinterpret_cast<To>(value);
    } else {
        return static_cast<To>(value);
    }
}
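// Illustration (editorial note, not part of the original source): the two helpers
// round-trip a value through the Interlocked operand width, e.g. for a pointer payload
//
//     int*   p    = /* ... */;
//     LONG64 bits = atomicCastTo<int*, LONG64>(p);      // reinterpret_cast path
//     int*   back = atomicCastBack<int*, LONG64>(bits); // back == p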
// Default construction zero-fills the storage.
template <typename TYPE>
atomicImpl<TYPE, false>::atomicImpl() {
    std::memset(const_cast<TYPE*>(&this->data_), byte{}, sizeof(TYPE));
}

template <typename TYPE>
atomicImpl<TYPE, false>::atomicImpl(TYPE value, memOrder order) {
    this->store(value, order);
}

template <typename TYPE>
constexpr bool atomicImpl<TYPE, false>::isLockFree() noexcept {
    return true;
}

// Every ordering is satisfied by the full-barrier InterlockedExchange.
template <typename TYPE>
void atomicImpl<TYPE, false>::store(TYPE value, memOrder order) {
    switch (order) {
        case memOrder::RELAXED:
        case memOrder::RELEASE:
        case memOrder::SEQ_CST:
        default:
            if constexpr (sizeof(TYPE) == 4) {
                InterlockedExchange(reinterpret_cast<volatile val_type*>(&this->data_),
                                    atomicCastTo<TYPE, val_type>(value));
            } else {
                InterlockedExchange64(reinterpret_cast<volatile val_type*>(&this->data_),
                                      atomicCastTo<TYPE, val_type>(value));
            }
            break;
    }
}

// Reads via a compare-exchange with identical exchange and comparand, which returns the
// stored value without modifying it.
template <typename TYPE>
TYPE atomicImpl<TYPE, false>::load(memOrder order) const noexcept {
    switch (order) {
        case memOrder::RELAXED:
        case memOrder::ACQUIRE:
        case memOrder::SEQ_CST:
        default:
            if constexpr (sizeof(TYPE) == 4) {
                return atomicCastBack<TYPE, val_type>(
                    InterlockedCompareExchange(
                        reinterpret_cast<volatile LONG*>(const_cast<TYPE*>(&this->data_)),
                        0, 0));
            } else {
                return atomicCastBack<TYPE, val_type>(
                    InterlockedCompareExchange64(
                        reinterpret_cast<volatile LONG64*>(const_cast<TYPE*>(&this->data_)),
                        0, 0));
            }
    }
}

template <typename TYPE>
TYPE atomicImpl<TYPE, false>::operator*() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, false>::operator TYPE() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator=(TYPE value) noexcept {
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator+=(TYPE value) noexcept {
    if constexpr (sizeof(TYPE) == 4) {
        InterlockedAdd(reinterpret_cast<volatile val_type*>(&this->data_),
                       atomicCastTo<TYPE, val_type>(value));
    } else {
        InterlockedAdd64(reinterpret_cast<volatile val_type*>(&this->data_),
                         atomicCastTo<TYPE, val_type>(value));
    }
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, false>& atomicImpl<TYPE, false>::operator-=(TYPE value) noexcept {
    return *this += -value;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, false>::exchange(TYPE value, memOrder) noexcept {
    if constexpr (sizeof(TYPE) == 4) {
        return atomicCastBack<TYPE, val_type>(
            InterlockedExchange(reinterpret_cast<volatile val_type*>(&this->data_),
                                atomicCastTo<TYPE, val_type>(value)));
    } else {
        return atomicCastBack<TYPE, val_type>(
            InterlockedExchange64(reinterpret_cast<volatile val_type*>(&this->data_),
                                  atomicCastTo<TYPE, val_type>(value)));
    }
}

template <typename TYPE>
bool atomicImpl<TYPE, false>::exchangeCmp(TYPE& expected, TYPE desired, memOrder) noexcept {
    if constexpr (sizeof(TYPE) == 4) {
        val_type old = InterlockedCompareExchange(
            reinterpret_cast<volatile val_type*>(&this->data_),
            atomicCastTo<TYPE, val_type>(desired),
            atomicCastTo<TYPE, val_type>(expected));
        const bool success = old == atomicCastTo<TYPE, val_type>(expected);
        if (!success) expected = atomicCastBack<TYPE, val_type>(old);
        return success;
    } else {
        val_type old = InterlockedCompareExchange64(
            reinterpret_cast<volatile val_type*>(&this->data_),
            atomicCastTo<TYPE, val_type>(desired),
            atomicCastTo<TYPE, val_type>(expected));
        const bool success = old == atomicCastTo<TYPE, val_type>(expected);
        if (!success) expected = atomicCastBack<TYPE, val_type>(old);
        return success;
    }
}
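// Editorial note (not from the original source): on this backend success is detected by
// comparing the value returned by InterlockedCompareExchange with `expected`. For example,
// if the slot holds 8, exchangeCmp(expected = 8, 9) sees 8 == 8, installs 9 and returns
// true, while exchangeCmp(expected = 7, 9) sees 8 != 7, writes 8 back into `expected` and
// returns false.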
// Definitions for the MSVC locked fallback: identical logic to the GCC/Clang fallback,
// serialized through a wMutex; the memory-order argument is unused here.

template <typename TYPE>
atomicImpl<TYPE, true>::atomicImpl(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
constexpr bool atomicImpl<TYPE, true>::isLockFree() noexcept {
    return false;
}

template <typename TYPE>
void atomicImpl<TYPE, true>::store(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::load(memOrder) const noexcept {
    uniqueLock lock{this->mutex_};
    return *this->data_;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::operator*() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, true>::operator TYPE() const noexcept {
    return this->load();
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator=(TYPE value) noexcept {
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator+=(TYPE value) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ + value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
atomicImpl<TYPE, true>& atomicImpl<TYPE, true>::operator-=(TYPE value) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ - value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
TYPE atomicImpl<TYPE, true>::exchange(const TYPE& value, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_;
    this->data_.set(value);
    return result;
}

template <typename TYPE>
bool atomicImpl<TYPE, true>::exchangeCmp(TYPE& expected, const TYPE& desired, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    if (*this->data_ == expected) {
        this->data_.set(desired);
        return true;
    }
    expected = *this->data_;
    return false;
}
#endif

template<typename TYPE>
auto makeAtomic();

template<typename TYPE>
auto makeAtomic(TYPE value);

} // namespace original

#endif // ORIGINAL_ATOMIC_H