// ExtendedAtomicOps.h
  1. #pragma once
  2. #include "ExtendedAtomicTypes.h"
  3. UNITY_PLATFORM_BEGIN_NAMESPACE;
// Memory-order tags modeled on the C++11 std::memory_order constants.
// Each ordering lives in its own enum type so the atomic_* functions can be
// overloaded per ordering and select the barrier strategy at compile time
// (e.g. the fallback atomic_load_explicit below accepts memory_order_relaxed_t).
enum memory_order_relaxed_t { memory_order_relaxed = 0 };
// consume is conservatively mapped to acquire.
enum memory_order_acquire_t { memory_order_acquire = 2, memory_order_consume = memory_order_acquire };
enum memory_order_release_t { memory_order_release = 3 };
enum memory_order_acq_rel_t { memory_order_acq_rel = 4 };
enum memory_order_seq_cst_t { memory_order_seq_cst = 5 };
  9. /*
  10. Available atomic functions:
  11. // non-explicit versions, use sequential consistency semantic by default
  12. // atomic load
  13. atomic_word atomic_load (const volatile atomic_word* p);
  14. // atomic store
  15. void atomic_store (volatile atomic_word* p, atomic_word val);
  16. // atomic exchange, returns previous value
  17. atomic_word atomic_exchange (volatile atomic_word* p, atomic_word val);
  18. // atomic compare exchange (strong), returns if the operation succeeded and update *oldval with the previous value
  19. bool atomic_compare_exchange (volatile atomic_word* p, atomic_word* oldval, atomic_word newval);
  20. // atomic fetch then add, returns previous value
  21. atomic_word atomic_fetch_add (volatile atomic_word *p, atomic_word val);
  22. // atomic fetch then sub, returns previous value
  23. atomic_word atomic_fetch_sub (volatile atomic_word *p, atomic_word val);
  24. // explicit versions
  25. // memory fence with <mo> semantic
  26. void atomic_thread_fence (memory_order_t mo);
  27. // atomic load with <mo> semantic
  28. atomic_word atomic_load_explicit (const volatile atomic_word* p, memory_order_t mo);
  29. // atomic store with <mo> semantic
  30. void atomic_store_explicit (volatile atomic_word* p, atomic_word v, memory_order_t mo);
  31. // atomic exchange with <mo> semantic, returns previous value
  32. atomic_word atomic_exchange_explicit (volatile atomic_word* p, atomic_word v, memory_order_t mo);
  33. // on RISC platforms with LoadLinked-StoreConditional available:
  34. // atomic_compare_exchange_weak_explicit: can fail spuriously even if *p == *oldval
// uses <success> memory barrier when it succeeds, <failure> otherwise
  36. // returns the state of the operation and updates *oldval with the previous value
  37. bool atomic_compare_exchange_weak_explicit (volatile atomic_word* p, atomic_word *oldval, atomic_word newval, memory_order_t success, memory_order_t failure);
  38. // atomic_compare_exchange_strong_explicit: can loop and only returns false if *p != *oldval
  39. // uses <success> memory barrier when it succeeds, <failure> otherwise
  40. // returns the state of the operation and updates *oldval with the previous value
  41. bool atomic_compare_exchange_strong_explicit (volatile atomic_word* p, atomic_word *oldval, atomic_word newval, memory_order_t success, memory_order_t failure);
  42. // atomic fetch then add with <mo> semantic, returns previous value
  43. int atomic_fetch_add_explicit (volatile int* p, int val, memory_order_t mo);
  44. atomic_word atomic_fetch_add_explicit (volatile atomic_word* p, atomic_word val, memory_order_t mo);
  45. // atomic fetch then sub with <mo> semantic, returns previous value
  46. int atomic_fetch_sub_explicit (volatile int* p, int val, memory_order_t mo);
  47. atomic_word atomic_fetch_sub_explicit (volatile atomic_word* p, atomic_word val, memory_order_t mo);
// extensions to the C++11 standard:
  49. // spinning hint for the processor
  50. void atomic_pause ();
  51. // atomic increment with relaxed semantic
  52. void atomic_retain (volatile int *p);
// atomic decrement with acquire/release semantic, returns true if resulting value is zero, false otherwise
  54. bool atomic_release (volatile int *p);
  55. // on platforms with double word compare exchange (ABA safe atomic pointers):
  56. // atomic load
  57. atomic_word2 atomic_load_explicit (const volatile atomic_word2* p, memory_order_t mo);
  58. // atomic store
  59. void atomic_store_explicit (volatile atomic_word2* p, atomic_word2 v, memory_order_t mo);
  60. // atomic exchange
atomic_word2 atomic_exchange_explicit (volatile atomic_word2* p, atomic_word2 newval, memory_order_t mo);
  62. // atomic compare exchange
  63. bool atomic_compare_exchange_strong_explicit (volatile atomic_word2* p, atomic_word2* oldval, atomic_word2 newval, memory_order_t success, memory_order_t failure);
  64. */
  65. #if IL2CPP_TARGET_HAS_EXTENDED_ATOMICS
  66. # include "os/ExtendedAtomicOps.h"
  67. #elif defined(__x86_64__) || defined(_M_X64)
  68. # include "ExtendedAtomicOps-x86-64.h"
  69. # define UNITY_ATOMIC_INT_OVERLOAD
  70. #elif defined(__x86__) || defined(__i386__) || defined(_M_IX86)
  71. # include "ExtendedAtomicOps-x86.h"
  72. #elif (defined(__arm64__) || defined(__aarch64__)) && (defined(__clang__) || defined(__GNUC__))
  73. # include "ExtendedAtomicOps-arm64.h"
  74. # define UNITY_ATOMIC_INT_OVERLOAD
  75. #elif defined(_M_ARM) || (defined(__arm__) && (defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__)) && (!UNITY_STV_API) && (defined(__clang__) || defined(__GNUC__)))
  76. # include "ExtendedAtomicOps-arm.h"
  77. #elif PLATFORM_WIIU
  78. # include "ExtendedAtomicOps-ppc.h"
  79. #elif PLATFORM_PSVITA
  80. # include "PlatformExtendedAtomicOps.h"
  81. #elif (defined(__ppc64__) || defined(_ARCH_PPC64)) && (defined(__clang__) || defined(__GNUC__))
  82. # include "ExtendedAtomicOps-ppc64.h"
  83. # define UNITY_ATOMIC_INT_OVERLOAD
  84. //#elif defined (__ppc__) && (defined (__clang__) || defined (__GNUC__))
  85. //# include "Runtime/Threads/ExtendedAtomicOps-ppc.h"
  86. #else
  87. #define UNITY_NO_ATOMIC_OPS
  88. static inline atomic_word atomic_load_explicit(const volatile atomic_word* p, memory_order_relaxed_t)
  89. {
  90. return *p;
  91. }
  92. static inline void atomic_store_explicit(volatile atomic_word* p, atomic_word v, memory_order_relaxed_t)
  93. {
  94. *p = v;
  95. }
  96. #endif
  97. #ifndef UNITY_NO_ATOMIC_OPS
  98. // non-explicit versions, use sequential consistency semantic
// Full memory fence; the non-explicit version defaults to sequential
// consistency, forwarding to the platform's explicit overload.
static inline void atomic_thread_fence()
{
    atomic_thread_fence(memory_order_seq_cst);
}
  103. static inline atomic_word atomic_load(const volatile atomic_word* p)
  104. {
  105. return atomic_load_explicit(p, memory_order_seq_cst);
  106. }
// Sequentially consistent store of val into *p (non-explicit convenience wrapper).
static inline void atomic_store(volatile atomic_word* p, atomic_word val)
{
    atomic_store_explicit(p, val, memory_order_seq_cst);
}
  111. static inline atomic_word atomic_exchange(volatile atomic_word* p, atomic_word val)
  112. {
  113. return atomic_exchange_explicit(p, val, memory_order_seq_cst);
  114. }
  115. static inline bool atomic_compare_exchange(volatile atomic_word* p, atomic_word* oldval, atomic_word newval)
  116. {
  117. return atomic_compare_exchange_strong_explicit(p, oldval, newval, memory_order_seq_cst, memory_order_seq_cst);
  118. }
  119. static inline atomic_word atomic_fetch_add(volatile atomic_word *p, atomic_word val)
  120. {
  121. return atomic_fetch_add_explicit(p, val, memory_order_seq_cst);
  122. }
  123. static inline atomic_word atomic_fetch_sub(volatile atomic_word *p, atomic_word val)
  124. {
  125. return atomic_fetch_sub_explicit(p, val, memory_order_seq_cst);
  126. }
  127. #if defined(UNITY_ATOMIC_INT_OVERLOAD)
  128. static inline int atomic_load(const volatile int* p)
  129. {
  130. return atomic_load_explicit(p, memory_order_seq_cst);
  131. }
// int overload (only on platforms defining UNITY_ATOMIC_INT_OVERLOAD):
// sequentially consistent store of val into *p.
static inline void atomic_store(volatile int* p, int val)
{
    atomic_store_explicit(p, val, memory_order_seq_cst);
}
  136. static inline int atomic_fetch_add(volatile int *p, int val)
  137. {
  138. return static_cast<int>(atomic_fetch_add_explicit(p, val, memory_order_seq_cst));
  139. }
  140. static inline int atomic_fetch_sub(volatile int *p, int val)
  141. {
  142. return static_cast<int>(atomic_fetch_sub_explicit(p, val, memory_order_seq_cst));
  143. }
  144. #endif
  145. #endif
  146. UNITY_PLATFORM_END_NAMESPACE;