/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2014, 2020 Andrey Semashev
 */
/*!
 * \file   atomic/detail/ops_emulated.hpp
 *
 * This header contains a lock pool-based implementation of the \c operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_OPS_EMULATED_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_EMULATED_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/lock_pool.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {
template< std::size_t Size, std::size_t Alignment, bool = (Alignment >= storage_traits< Size >::native_alignment) >
struct base_emulated_operations
{
    typedef typename storage_traits< Size >::type storage_type;
};

template< std::size_t Size, std::size_t Alignment >
struct base_emulated_operations< Size, Alignment, false >
{
    typedef buffer_storage< Size, Alignment > storage_type;
};
template< std::size_t Size, std::size_t Alignment, bool Signed >
struct emulated_operations :
    public base_emulated_operations< Size, Alignment >
{
    typedef base_emulated_operations< Size, Alignment > base_type;

    // Define storage_type to have alignment not greater than Alignment. This allows the operations to work with value_types
    // that may have weaker alignment requirements than storage_traits< Size >::type would require. This is important for atomic_ref<>.
    // atomic<> allows a higher alignment requirement than that of its value_type.
    // Note that storage_type should be an integral type, if possible, so that arithmetic and bitwise operations remain possible.
    typedef typename base_type::storage_type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = Alignment >= storage_traits< Size >::alignment ? storage_traits< Size >::alignment : Alignment;
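    // Illustrative example (the concrete storage_traits<> values below are assumptions made for the sake of the example,
    // not guaranteed on any particular platform): with Size == 8 and Alignment == 4, if storage_traits< 8 >::alignment
    // were 8, storage_alignment would evaluate to 4, i.e. the weaker alignment of the referenced object wins.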
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = false;

    typedef lock_pool::scoped_lock< storage_alignment > scoped_lock;
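    // Note: every operation below serializes through a lock acquired from the process-wide lock pool, keyed by the
    // address of the storage. The memory_order arguments are accepted for interface compatibility but are otherwise
    // ignored, since the lock already provides the required synchronization.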
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        scoped_lock lock(&storage);
        const_cast< storage_type& >(storage) = v;
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        scoped_lock lock(&storage);
        return const_cast< storage_type const& >(storage);
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s += v;
        return old_val;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s -= v;
        return old_val;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s = v;
        return old_val;
    }
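    // Classic CAS under the lock: the stored value is compared with 'expected' and, on success, replaced with 'desired'.
    // On failure, 'expected' is updated to the value actually observed.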
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        const bool res = old_val == expected;
        if (res)
            s = desired;
        expected = old_val;
        return res;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        // Note: This function is an exact copy of compare_exchange_strong. The reason we're not just forwarding the call
        // is that MSVC-12 ICEs in this case.
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        const bool res = old_val == expected;
        if (res)
            s = desired;
        expected = old_val;
        return res;
    }
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s &= v;
        return old_val;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s |= v;
        return old_val;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type& s = const_cast< storage_type& >(storage);
        scoped_lock lock(&storage);
        storage_type old_val = s;
        s ^= v;
        return old_val;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};
template< std::size_t Size, bool Signed >
struct operations :
    public emulated_operations< Size, storage_traits< Size >::alignment, Signed >
{
};
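// Usage sketch (illustrative only, not part of the public interface; the names below are local to this example).
// Higher layers such as atomic<> and atomic_ref<> reach this implementation through the operations<> template:
//
//     typedef atomics::detail::operations< 4u, true > ops;
//     ops::storage_type storage = 0;
//     ops::store(storage, 42, boost::memory_order_release);
//     ops::storage_type prev = ops::fetch_add(storage, 1, boost::memory_order_seq_cst);
//     // prev == 42; both calls above serialize on the lock pool entry selected for &storage.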
} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_OPS_EMULATED_HPP_INCLUDED_