atomic.h
#ifndef ORIGINAL_ATOMIC_H
#define ORIGINAL_ATOMIC_H

#include "config.h"

#include <type_traits> // std::is_trivially_copyable_v, std::is_pointer_v, ...

#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG
#include <cstring>
#elif ORIGINAL_COMPILER_MSVC
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <Windows.h>
#include <intrin.h>
#include <cstring> // std::memset is also used on the MSVC path
#endif

#include "optional.h"
#include "mutex.h"

namespace original {
#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG

    // ==================== Memory Order Enumeration ====================

    enum class memOrder {
        RELAXED = __ATOMIC_RELAXED,
        ACQUIRE = __ATOMIC_ACQUIRE,
        RELEASE = __ATOMIC_RELEASE,
        ACQ_REL = __ATOMIC_ACQ_REL,
        SEQ_CST = __ATOMIC_SEQ_CST,
    };

    // ==================== Forward Declarations ====================

    template<typename TYPE, bool USE_LOCK>
    class atomicImpl;

    template<typename TYPE>
    using atomic = atomicImpl<
        TYPE,
        !( std::is_trivially_copyable_v<TYPE> &&
           std::is_trivially_destructible_v<TYPE> &&
           __atomic_always_lock_free(sizeof(TYPE), nullptr) )
    >;

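    // Illustrative note (not from the original source): with this alias,
    // atomic<int> selects the lock-free atomicImpl<int, false> on mainstream
    // targets, while a type that is not trivially copyable/destructible or
    // not always lock-free at its size selects the mutex-based variant:
    //
    //   static_assert(original::atomic<int>::isLockFree());
    //   struct big { char buf[64]; };            // too large for lock-free ops
    //   static_assert(!original::atomic<big>::isLockFree());
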
    // ==================== Lock-Free Implementation ====================

    template<typename TYPE>
    class atomicImpl<TYPE, false> {
        alignas(TYPE) byte data_[sizeof(TYPE)]{};

        atomicImpl();

        explicit atomicImpl(TYPE value, memOrder order = SEQ_CST);

    public:
        // Memory ordering constants for convenience
        static constexpr auto RELAXED = memOrder::RELAXED;
        static constexpr auto ACQUIRE = memOrder::ACQUIRE;
        static constexpr auto RELEASE = memOrder::RELEASE;
        static constexpr auto ACQ_REL = memOrder::ACQ_REL;
        static constexpr auto SEQ_CST = memOrder::SEQ_CST;

        // Disable copying and moving
        atomicImpl(const atomicImpl&) = delete;
        atomicImpl(atomicImpl&&) = delete;
        atomicImpl& operator=(const atomicImpl&) = delete;
        atomicImpl& operator=(atomicImpl&&) = delete;

        static constexpr bool isLockFree() noexcept;

        void store(TYPE value, memOrder order = SEQ_CST);

        TYPE load(memOrder order = SEQ_CST) const noexcept;

        TYPE operator*() const noexcept;

        explicit operator TYPE() const noexcept;

        atomicImpl& operator=(TYPE value) noexcept;

        atomicImpl& operator+=(TYPE value) noexcept;

        atomicImpl& operator-=(TYPE value) noexcept;

        TYPE exchange(TYPE value, memOrder order = SEQ_CST) noexcept;

        bool exchangeCmp(TYPE& expected, TYPE desired, memOrder order = SEQ_CST) noexcept;

        ~atomicImpl() = default;

        // Friend factory functions
        template<typename T>
        friend auto makeAtomic();

        template<typename T>
        friend auto makeAtomic(T value);
    };

    // ==================== Mutex-Based Implementation ====================

    template<typename TYPE>
    class atomicImpl<TYPE, true> {
        mutable pMutex mutex_;
        alternative<TYPE> data_;

        atomicImpl() = default;

        explicit atomicImpl(TYPE value, memOrder order = RELEASE);

    public:
        // Memory ordering constants (for API compatibility)
        static constexpr auto RELAXED = memOrder::RELAXED;
        static constexpr auto ACQUIRE = memOrder::ACQUIRE;
        static constexpr auto RELEASE = memOrder::RELEASE;
        static constexpr auto ACQ_REL = memOrder::ACQ_REL;
        static constexpr auto SEQ_CST = memOrder::SEQ_CST;

        // Disable copying and moving
        atomicImpl(const atomicImpl&) = delete;
        atomicImpl(atomicImpl&&) = delete;
        atomicImpl& operator=(const atomicImpl&) = delete;
        atomicImpl& operator=(atomicImpl&&) = delete;

        static constexpr bool isLockFree() noexcept;

        void store(TYPE value, memOrder order = SEQ_CST);

        TYPE load(memOrder order = SEQ_CST) const noexcept;

        TYPE operator*() const noexcept;

        explicit operator TYPE() const noexcept;

        atomicImpl& operator=(TYPE value) noexcept;

        atomicImpl& operator+=(TYPE value) noexcept;

        atomicImpl& operator-=(TYPE value) noexcept;

        TYPE exchange(const TYPE& value, memOrder order = SEQ_CST) noexcept;

        bool exchangeCmp(TYPE& expected, const TYPE& desired, memOrder order = SEQ_CST) noexcept;

        ~atomicImpl() = default;

        // Friend factory functions
        template<typename T>
        friend auto makeAtomic();

        template<typename T>
        friend auto makeAtomic(T value);
    };
#elif ORIGINAL_COMPILER_MSVC
    enum class memOrder {
        RELAXED,
        ACQUIRE,
        RELEASE,
        ACQ_REL,
        SEQ_CST,
    };

    template<typename TYPE>
    constexpr bool isLockFreeType() noexcept;

    template<typename TYPE, bool USE_LOCK>
    class atomicImpl;

    template<typename TYPE>
    using atomic = atomicImpl<TYPE, !isLockFreeType<TYPE>()>;

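    // Illustrative note (not from the original source): isLockFreeType()
    // below only checks sizeof(TYPE), so 4- and 8-byte types (int, long long,
    // double, pointers on x64) take the interlocked lock-free path, while any
    // other size falls back to the mutex-based specialisation.
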
    template<typename TYPE>
    class atomicImpl<TYPE, false>
    {
        using val_type = some_t<sizeof(TYPE) == 4, LONG, LONG64>;

        alignas(TYPE) volatile TYPE data_{};

        template<typename From, typename To>
        static To atomicCastTo(From value);

        template<typename To, typename From>
        static To atomicCastBack(From value);

        atomicImpl();

        explicit atomicImpl(TYPE value, memOrder order = SEQ_CST);
    public:
        static constexpr auto RELAXED = memOrder::RELAXED;
        static constexpr auto ACQUIRE = memOrder::ACQUIRE;
        static constexpr auto RELEASE = memOrder::RELEASE;
        static constexpr auto ACQ_REL = memOrder::ACQ_REL;
        static constexpr auto SEQ_CST = memOrder::SEQ_CST;

        atomicImpl(const atomicImpl&) = delete;
        atomicImpl(atomicImpl&&) = delete;
        atomicImpl& operator=(const atomicImpl&) = delete;
        atomicImpl& operator=(atomicImpl&&) = delete;

        static constexpr bool isLockFree() noexcept;

        void store(TYPE value, memOrder order = SEQ_CST);

        TYPE load(memOrder order = SEQ_CST) const noexcept;

        TYPE operator*() const noexcept;

        explicit operator TYPE() const noexcept;

        atomicImpl& operator=(TYPE value) noexcept;

        atomicImpl& operator+=(TYPE value) noexcept;

        atomicImpl& operator-=(TYPE value) noexcept;

        TYPE exchange(TYPE value, memOrder order = SEQ_CST) noexcept;

        bool exchangeCmp(TYPE& expected, TYPE desired, memOrder order = SEQ_CST) noexcept;

        ~atomicImpl() = default;

        template<typename T>
        friend auto makeAtomic();

        template<typename T>
        friend auto makeAtomic(T value);
    };

    template<typename TYPE>
    class atomicImpl<TYPE, true>
    {
        mutable wMutex mutex_;
        alternative<TYPE> data_;

        atomicImpl() = default;

        explicit atomicImpl(TYPE value, memOrder order = RELEASE);
    public:
        static constexpr auto RELAXED = memOrder::RELAXED;
        static constexpr auto ACQUIRE = memOrder::ACQUIRE;
        static constexpr auto RELEASE = memOrder::RELEASE;
        static constexpr auto ACQ_REL = memOrder::ACQ_REL;
        static constexpr auto SEQ_CST = memOrder::SEQ_CST;

        atomicImpl(const atomicImpl&) = delete;
        atomicImpl(atomicImpl&&) = delete;
        atomicImpl& operator=(const atomicImpl&) = delete;
        atomicImpl& operator=(atomicImpl&&) = delete;

        static constexpr bool isLockFree() noexcept;

        void store(TYPE value, memOrder order = SEQ_CST);

        TYPE load(memOrder order = SEQ_CST) const noexcept;

        TYPE operator*() const noexcept;

        explicit operator TYPE() const noexcept;

        atomicImpl& operator=(TYPE value) noexcept;

        atomicImpl& operator+=(TYPE value) noexcept;

        atomicImpl& operator-=(TYPE value) noexcept;

        TYPE exchange(const TYPE& value, memOrder order = SEQ_CST) noexcept;

        bool exchangeCmp(TYPE& expected, const TYPE& desired, memOrder order = SEQ_CST) noexcept;

        ~atomicImpl() = default;

        template<typename T>
        friend auto makeAtomic();

        template<typename T>
        friend auto makeAtomic(T value);
    };
#endif
    // ==================== Factory Functions ====================

    template<typename TYPE>
    auto makeAtomic();

    template<typename TYPE>
    auto makeAtomic(TYPE value);

} // namespace original

#if ORIGINAL_COMPILER_GCC || ORIGINAL_COMPILER_CLANG
template <typename TYPE>
original::atomicImpl<TYPE, false>::atomicImpl() {
    std::memset(this->data_, byte{}, sizeof(TYPE));
}

template <typename TYPE>
original::atomicImpl<TYPE, false>::atomicImpl(TYPE value, memOrder order) {
    __atomic_store(reinterpret_cast<TYPE*>(this->data_), &value, static_cast<integer>(order));
}

template <typename TYPE>
constexpr bool original::atomicImpl<TYPE, false>::isLockFree() noexcept {
    return true;
}

template <typename TYPE>
void original::atomicImpl<TYPE, false>::store(TYPE value, memOrder order) {
    __atomic_store(reinterpret_cast<TYPE*>(this->data_), &value, static_cast<integer>(order));
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::load(memOrder order) const noexcept {
    TYPE result;
    __atomic_load(reinterpret_cast<const TYPE*>(this->data_), &result, static_cast<integer>(order));
    return result;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::operator*() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, false>::operator TYPE() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, false>& original::atomicImpl<TYPE, false>::operator=(TYPE value) noexcept
{
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, false>& original::atomicImpl<TYPE, false>::operator+=(TYPE value) noexcept
{
    __atomic_fetch_add(reinterpret_cast<TYPE*>(this->data_), value, static_cast<integer>(memOrder::SEQ_CST));
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, false>& original::atomicImpl<TYPE, false>::operator-=(TYPE value) noexcept
{
    __atomic_fetch_sub(reinterpret_cast<TYPE*>(this->data_), value, static_cast<integer>(memOrder::SEQ_CST));
    return *this;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::exchange(TYPE value, memOrder order) noexcept {
    TYPE result;
    __atomic_exchange(reinterpret_cast<TYPE*>(this->data_), &value,
                      &result, static_cast<integer>(order));
    return result;
}

template <typename TYPE>
bool original::atomicImpl<TYPE, false>::exchangeCmp(TYPE& expected, TYPE desired, memOrder order) noexcept
{
    return __atomic_compare_exchange_n(reinterpret_cast<TYPE*>(this->data_),
                                       &expected, std::move(desired), false,
                                       static_cast<integer>(order), static_cast<integer>(order));
}

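// Illustrative sketch (not part of the original header): a typical
// compare-and-swap retry loop built on exchangeCmp(). On failure, 'expected'
// is refreshed with the currently stored value, so the caller simply
// recomputes and retries.
//
//   auto counter = original::makeAtomic<int>(0);
//   int expected = counter.load();
//   while (!counter.exchangeCmp(expected, expected + 1)) {
//       // 'expected' was updated by the failed CAS; try again.
//   }
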
template <typename TYPE>
original::atomicImpl<TYPE, true>::atomicImpl(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
constexpr bool original::atomicImpl<TYPE, true>::isLockFree() noexcept {
    return false;
}

template <typename TYPE>
void original::atomicImpl<TYPE, true>::store(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::load(memOrder) const noexcept {
    uniqueLock lock{this->mutex_};
    return *this->data_;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::operator*() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, true>::operator TYPE() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, true>&
original::atomicImpl<TYPE, true>::operator=(TYPE value) noexcept
{
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, true>& original::atomicImpl<TYPE, true>::operator+=(TYPE value) noexcept
{
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ + value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, true>& original::atomicImpl<TYPE, true>::operator-=(TYPE value) noexcept
{
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ - value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::exchange(const TYPE& value, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_;
    this->data_.set(value);
    return result;
}

template <typename TYPE>
bool original::atomicImpl<TYPE, true>::exchangeCmp(TYPE& expected, const TYPE& desired, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    if (*this->data_ == expected) {
        this->data_.set(desired);
        return true;
    }
    expected = *this->data_;
    return false;
}
#elif ORIGINAL_COMPILER_MSVC

template<typename TYPE>
constexpr bool original::isLockFreeType() noexcept {
    return sizeof(TYPE) == 4 || sizeof(TYPE) == 8;
}

template <typename TYPE>
template <typename From, typename To>
To original::atomicImpl<TYPE, false>::atomicCastTo(From value)
{
    if constexpr (std::is_pointer_v<From>) {
        return reinterpret_cast<To>(value);
    } else {
        return static_cast<To>(value);
    }
}

template <typename TYPE>
template <typename To, typename From>
To original::atomicImpl<TYPE, false>::atomicCastBack(From value)
{
    if constexpr (std::is_pointer_v<To>) {
        return reinterpret_cast<To>(value);
    } else {
        return static_cast<To>(value);
    }
}

template <typename TYPE>
original::atomicImpl<TYPE, false>::atomicImpl()
{
    // data_ is a single volatile TYPE here, so take its address and drop
    // volatile for std::memset.
    std::memset(const_cast<TYPE*>(&this->data_), byte{}, sizeof(TYPE));
}

template <typename TYPE>
original::atomicImpl<TYPE, false>::atomicImpl(TYPE value, const memOrder order)
{
    this->store(value, order);
}

template <typename TYPE>
constexpr bool
original::atomicImpl<TYPE, false>::isLockFree() noexcept
{
    return true;
}

template <typename TYPE>
void original::atomicImpl<TYPE, false>::store(TYPE value, const memOrder order)
{
    switch (order) {
        case memOrder::RELAXED:
            this->data_ = value;
            break;
        case memOrder::RELEASE:
            _WriteBarrier();
            this->data_ = value;
            break;
        case memOrder::SEQ_CST:
        default:
            if constexpr (sizeof(TYPE) == 4) {
                InterlockedExchange(reinterpret_cast<volatile val_type*>(&this->data_),
                                    atomicCastTo<TYPE, val_type>(value));
            } else {
                InterlockedExchange64(reinterpret_cast<volatile val_type*>(&this->data_),
                                      atomicCastTo<TYPE, val_type>(value));
            }
            break;
    }
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::load(const memOrder order) const noexcept
{
    TYPE value;
    switch (order) {
        case memOrder::RELAXED:
            value = this->data_;
            break;
        case memOrder::ACQUIRE:
            value = this->data_;
            _ReadBarrier();
            break;
        case memOrder::SEQ_CST:
        default:
            if constexpr (sizeof(TYPE) == 4) {
                return atomicCastBack<TYPE, val_type>(
                    InterlockedCompareExchange(
                        reinterpret_cast<volatile LONG*>(const_cast<TYPE*>(&this->data_)),
                        0, 0));
            } else {
                return atomicCastBack<TYPE, val_type>(
                    InterlockedCompareExchange64(
                        reinterpret_cast<volatile LONG64*>(const_cast<TYPE*>(&this->data_)),
                        0, 0));
            }
    }
    return value;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::operator*() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, false>::operator TYPE() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, false>&
original::atomicImpl<TYPE, false>::operator=(TYPE value) noexcept
{
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, false>&
original::atomicImpl<TYPE, false>::operator+=(TYPE value) noexcept
{
    if constexpr (sizeof(TYPE) == 4) {
        InterlockedAdd(reinterpret_cast<volatile val_type*>(&this->data_),
                       atomicCastTo<TYPE, val_type>(value));
    } else {
        InterlockedAdd64(reinterpret_cast<volatile val_type*>(&this->data_),
                         atomicCastTo<TYPE, val_type>(value));
    }
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, false>&
original::atomicImpl<TYPE, false>::operator-=(TYPE value) noexcept
{
    return *this += -value;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, false>::exchange(TYPE value, memOrder) noexcept
{
    if constexpr (sizeof(TYPE) == 4) {
        return atomicCastBack<TYPE, val_type>(
            InterlockedExchange(reinterpret_cast<volatile val_type*>(&this->data_),
                                atomicCastTo<TYPE, val_type>(value))
        );
    } else {
        return atomicCastBack<TYPE, val_type>(
            InterlockedExchange64(reinterpret_cast<volatile val_type*>(&this->data_),
                                  atomicCastTo<TYPE, val_type>(value))
        );
    }
}

template <typename TYPE>
bool original::atomicImpl<TYPE, false>::exchangeCmp(TYPE& expected, TYPE desired, memOrder) noexcept
{
    if constexpr (sizeof(TYPE) == 4) {
        val_type old = InterlockedCompareExchange(
            reinterpret_cast<volatile val_type*>(&this->data_),
            atomicCastTo<TYPE, val_type>(desired),
            atomicCastTo<TYPE, val_type>(expected));
        const bool success = old == atomicCastTo<TYPE, val_type>(expected);
        if (!success) expected = atomicCastBack<TYPE, val_type>(old);
        return success;
    } else {
        val_type old = InterlockedCompareExchange64(
            reinterpret_cast<volatile val_type*>(&this->data_),
            atomicCastTo<TYPE, val_type>(desired),
            atomicCastTo<TYPE, val_type>(expected));
        const bool success = old == atomicCastTo<TYPE, val_type>(expected);
        if (!success) expected = atomicCastBack<TYPE, val_type>(old);
        return success;
    }
}

template <typename TYPE>
original::atomicImpl<TYPE, true>::atomicImpl(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
constexpr bool original::atomicImpl<TYPE, true>::isLockFree() noexcept {
    return false;
}

template <typename TYPE>
void original::atomicImpl<TYPE, true>::store(TYPE value, memOrder) {
    uniqueLock lock{this->mutex_};
    this->data_.set(value);
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::load(memOrder) const noexcept {
    uniqueLock lock{this->mutex_};
    return *this->data_;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::operator*() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, true>::operator TYPE() const noexcept
{
    return this->load();
}

template <typename TYPE>
original::atomicImpl<TYPE, true>&
original::atomicImpl<TYPE, true>::operator=(TYPE value) noexcept
{
    this->store(std::move(value));
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, true>&
original::atomicImpl<TYPE, true>::operator+=(TYPE value) noexcept
{
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ + value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
original::atomicImpl<TYPE, true>&
original::atomicImpl<TYPE, true>::operator-=(TYPE value) noexcept
{
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_ - value;
    this->data_.set(result);
    return *this;
}

template <typename TYPE>
TYPE original::atomicImpl<TYPE, true>::exchange(const TYPE& value, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    TYPE result = *this->data_;
    this->data_.set(value);
    return result;
}

template <typename TYPE>
bool original::atomicImpl<TYPE, true>::exchangeCmp(TYPE& expected, const TYPE& desired, memOrder) noexcept {
    uniqueLock lock{this->mutex_};
    if (*this->data_ == expected) {
        this->data_.set(desired);
        return true;
    }
    expected = *this->data_;
    return false;
}

#endif

template<typename TYPE>
auto original::makeAtomic()
{
    return atomic<TYPE>{};
}

template<typename TYPE>
auto original::makeAtomic(TYPE value)
{
    return atomic<TYPE>{std::move(value)};
}
#endif // ORIGINAL_ATOMIC_H
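
// ==================== Usage Sketch (illustrative) ====================
// A minimal example of the intended API, not part of the original header.
// It assumes C++17 guaranteed copy elision, since the atomics are neither
// copyable nor movable and must be created through the makeAtomic() factories.
//
//   #include "atomic.h"
//
//   void example() {
//       auto counter = original::makeAtomic<int>(0);
//       auto flag    = original::makeAtomic<bool>(false);
//
//       counter += 5;                                   // atomic add
//       counter.store(7, original::memOrder::RELEASE);  // release store
//       const int seen = counter.load();                // SEQ_CST by default
//
//       int expected = 7;
//       if (counter.exchangeCmp(expected, 8)) {         // CAS: 7 -> 8
//           flag.store(true);
//       }
//       (void)seen;
//   }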