|
40 | 40 | #ifndef GEOGRAM_BASIC_ATOMICS
|
41 | 41 | #define GEOGRAM_BASIC_ATOMICS
|
42 | 42 |
|
43 |
| -// Obsolete due to switching to std::atomics |
| 43 | +#include <geogram/basic/common.h> |
| 44 | +#include <geogram/basic/numeric.h> |
44 | 45 |
|
| 46 | +/** |
| 47 | + * \file geogram/basic/atomics.h |
| 48 | + * \brief Functions for atomic operations |
| 49 | + */ |
| 50 | + |
| 51 | +#ifdef GEO_OS_LINUX |
| 52 | +# if defined(GEO_OS_EMSCRIPTEN) |
| 53 | +# define GEO_USE_DUMMY_ATOMICS |
| 54 | +# elif defined(GEO_OS_RASPBERRY) |
| 55 | +# define GEO_USE_ARM32_ATOMICS |
| 56 | +# elif defined(GEO_OS_ANDROID) |
| 57 | +# define GEO_USE_ANDROID_ATOMICS |
| 58 | +# else |
| 59 | +# define GEO_USE_X86_ATOMICS |
| 60 | +# endif |
45 | 61 | #endif
|
46 | 62 |
|
| 63 | +#if defined(GEO_USE_DUMMY_ATOMICS) |
| 64 | + |
/**
 * \brief Processor pause (dummy implementation, Emscripten build)
 */
inline void geo_pause() {
}

/**
 * \brief Tests and sets a bit (dummy implementation, Emscripten build)
 * \details NOTE(review): this stub ignores its arguments and does not
 *  actually set the bit — acceptable only if the single-threaded
 *  Emscripten callers never rely on the stored value; confirm.
 * \retval 0 always (bit reported as previously unset)
 */
inline char atomic_bittestandset_x86(volatile unsigned int*, unsigned int) {
    return 0;
}

/**
 * \brief Tests and resets a bit (dummy implementation, Emscripten build)
 * \details NOTE(review): like the stub above, the bit is not modified.
 * \retval 0 always (bit reported as previously unset)
 */
inline char atomic_bittestandreset_x86(volatile unsigned int*, unsigned int) {
    return 0;
}
| 75 | + |
| 76 | +#elif defined(GEO_USE_ANDROID_ATOMICS) |
| 77 | + |
| 78 | +/** A mutex for Android */ |
| 79 | +typedef GEO::Numeric::uint32 android_mutex_t; |
| 80 | + |
/**
 * \brief Acquires a lock (Android only)
 * \param[in,out] lock the mutex to lock
 */
inline void lock_mutex_android(volatile android_mutex_t* lock) {
    // Spin until the previous value was 0 (lock was free);
    // __sync_lock_test_and_set provides acquire semantics.
    while(__sync_lock_test_and_set(lock, 1) != 0);
}
| 84 | + |
/**
 * \brief Releases a lock (Android only)
 * \param[in,out] lock the mutex to unlock
 */
inline void unlock_mutex_android(volatile android_mutex_t* lock) {
    // Release barrier + store of 0, pairing with lock_mutex_android().
    __sync_lock_release(lock);
}
| 88 | + |
/**
 * \brief Atomically tests and sets a bit (Android only)
 * \details Sets bit \p bit of *\p ptr; full barrier via the
 *  __sync builtin.
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to set in *\p ptr
 * \retval a non-zero integer if the bit was previously set
 * \retval 0 if the bit was previously not set
 */
inline unsigned int atomic_bitset_android(volatile unsigned int* ptr, unsigned int bit) {
    return __sync_fetch_and_or(ptr, 1u << bit) & (1u << bit);
}
| 92 | + |
/**
 * \brief Atomically tests and resets a bit (Android only)
 * \details Clears bit \p bit of *\p ptr; full barrier via the
 *  __sync builtin.
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to reset in *\p ptr
 * \retval a non-zero integer if the bit was previously set
 * \retval 0 if the bit was previously not set
 */
inline unsigned int atomic_bitreset_android(volatile unsigned int* ptr, unsigned int bit) {
    return __sync_fetch_and_and(ptr, ~(1u << bit)) & (1u << bit);
}
| 96 | + |
/**
 * \brief Issues a full memory and compiler barrier (Android only)
 */
inline void memory_barrier_android() {
    // Full memory barrier.
    __sync_synchronize();
}
| 101 | + |
/**
 * \brief Waits for an event (Android only)
 * \details Placeholder: currently a no-op, so waiters fall back to
 *  pure spinning. NOTE(review): correctness is unaffected, only CPU
 *  usage — confirm this is intentional.
 */
inline void wait_for_event_android() {
    /* TODO */
}

/**
 * \brief Sends an event (Android only)
 * \details Placeholder: currently a no-op (see wait_for_event_android()).
 */
inline void send_event_android() {
    /* TODO */
}
| 109 | + |
| 110 | +#elif defined(GEO_USE_ARM32_ATOMICS) |
| 111 | + |
| 112 | +/** A mutex for ARM processors */ |
| 113 | +typedef GEO::Numeric::uint32 arm32_mutex_t; |
| 114 | + |
| 115 | +/** |
| 116 | + * \brief Acquires a lock (ARM only) |
| 117 | + * \param[in,out] lock the mutex to lock |
| 118 | + */ |
| 119 | +inline void lock_mutex_arm32(volatile arm32_mutex_t* lock) { |
| 120 | + arm_mutex_t tmp; |
| 121 | + __asm__ __volatile__ ( |
| 122 | + "1: ldrex %0, [%1] \n" // read lock |
| 123 | + " cmp %0, #0 \n" // check if zero |
| 124 | + " wfene \n" // wait for event if non-zero |
| 125 | + " strexeq %0, %2, [%1] \n" // attempt to store new value |
| 126 | + " cmpeq %0, #0 \n" // test if store succeeded |
| 127 | + " bne 1b \n" // retry if not |
| 128 | + " dmb \n" // memory barrier |
| 129 | + : "=&r" (tmp) |
| 130 | + : "r" (lock), "r" (1) |
| 131 | + : "cc", "memory"); |
| 132 | +} |
| 133 | + |
/**
 * \brief Releases a lock (ARM only)
 * \param[in,out] lock the mutex to unlock
 */
inline void unlock_mutex_arm32(volatile arm32_mutex_t* lock) {
    // Store 0 to release, then signal any core parked in wfe
    // (see lock_mutex_arm32()).
    __asm__ __volatile__ (
        " dmb \n" // ensure all previous access are observed
        " str %1, [%0] \n" // clear the lock
        " dsb \n" // ensure completion of clear lock ...
        " sev \n" // ... before sending the event
        :
        : "r" (lock), "r" (0)
        : "cc", "memory");
}
| 148 | + |
/**
 * \brief Atomically tests and sets a bit (ARM only)
 * \details Sets the bit \p bit of the *\p ptr.
 *  The function is atomic and acts as a read-write memory barrier.
 *  NOTE(review): the asm below clobbers only "cc" and contains no dmb,
 *  so the memory-barrier claim looks unsupported by the code — confirm.
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to set in *\p ptr
 * \retval a non-zero integer if the bit was previously set
 * \retval 0 if the bit was previously not set
 */
inline unsigned int atomic_bitset_arm32(volatile unsigned int* ptr, unsigned int bit) {
    unsigned int tmp;
    unsigned int result;
    unsigned int OK;
    // Classic ldrex/strex retry loop: strex fails (OK != 0) if another
    // core wrote *ptr between the exclusive load and store.
    __asm__ __volatile__ (
        "1: ldrex %1, [%5] \n" // result = *ptr
        " orr %0, %1, %6, LSL %4 \n" // tmp = result OR (1 << bit)
        " strex %2, %0, [%5] \n" // *ptr = tmp, status in OK
        " teq %2, #0 \n" // if !OK then
        " bne 1b \n" // goto 1:
        " and %1, %1, %6, LSL %4 \n" // result = result AND (1 << bit)
        : "=&r" (tmp), "=&r" (result), "=&r" (OK), "+m" (*ptr)
        : "r" (bit), "r" (ptr), "r" (1)
        : "cc"
    );
    return result;
}
| 175 | + |
/**
 * \brief Atomically tests and resets a bit (ARM only)
 * \details Resets the bit \p bit of *\p ptr.
 *  The function is atomic and acts as a read-write memory barrier.
 *  NOTE(review): as in atomic_bitset_arm32(), the asm clobbers only
 *  "cc" and has no dmb — confirm the barrier claim.
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to reset in *\p ptr
 * \retval a non-zero integer if the bit was previously set
 * \retval 0 if the bit was previously not set
 */
inline unsigned int atomic_bitreset_arm32(volatile unsigned int* ptr, unsigned int bit) {
    unsigned int tmp;
    unsigned int result;
    unsigned int OK;
    // ldrex/strex retry loop; bic clears the bit, the final and
    // extracts its previous value.
    __asm__ __volatile__ (
        "1: ldrex %1, [%5] \n" // result = *ptr
        " bic %0, %1, %6, LSL %4 \n" // tmp = result AND NOT(1 << bit)
        " strex %2, %0, [%5] \n" // *ptr = tmp, status in OK
        " teq %2, #0 \n" // if !OK then
        " bne 1b \n" // goto 1:
        " and %1, %1, %6, LSL %4 \n" // result = result AND (1 << bit)
        : "=&r" (tmp), "=&r" (result), "=&r" (OK), "+m" (*ptr)
        : "r" (bit), "r" (ptr), "r" (1)
        : "cc"
    );
    return result;
}
| 202 | + |
/**
 * \brief Issues a memory and compiler barrier (ARM only)
 */
inline void memory_barrier_arm32() {
    // dmb = data memory barrier; the "memory" clobber additionally
    // prevents the compiler from reordering across the asm.
    __asm__ __volatile__ (
        "dmb \n"
        : : : "memory"
    );
}
| 212 | + |
/**
 * \brief Waits for an event (ARM only)
 * \details Parks the core in low-power state until sev (or another
 *  wakeup) is signalled; used by the spin-lock above.
 */
inline void wait_for_event_arm32() {
    __asm__ __volatile__ (
        "wfe \n"
        : : :
    );
}
| 222 | + |
/**
 * \brief Sends an event (ARM only)
 * \details Wakes cores parked in wfe; dsb first so prior stores are
 *  visible before the wakeup.
 */
inline void send_event_arm32() {
    __asm__ __volatile__ (
        "dsb \n" // ensure completion of store operations
        "sev \n"
        : : :
    );
}
| 233 | + |
| 234 | +#elif defined(GEO_USE_X86_ATOMICS) |
| 235 | + |
| 236 | +# define GEO_USE_X86_PAUSE |
| 237 | + |
| 238 | +# ifdef GEO_USE_X86_PAUSE |
| 239 | + |
/**
 * \brief Issues a processor pause (INTEL only)
 * \details Hints the CPU that this is a spin-wait loop
 *  (reduces power and pipeline flushes while spinning).
 */
inline void geo_pause() {
    __asm__ __volatile__ (
        "pause;\n"
    );
}
| 248 | + |
| 249 | +# else |
| 250 | +# ifdef __ICC |
| 251 | +# define geo_pause _mm_pause |
| 252 | +# else |
| 253 | +# define geo_pause __builtin_ia32_pause |
| 254 | +# endif |
| 255 | + |
| 256 | +# endif |
| 257 | + |
/**
 * \brief Atomically tests and sets a bit (INTEL only)
 * \details Sets bit \p bit of *\p ptr and returns its previous value.
 *  The function is atomic and acts as a read-write memory barrier.
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to set in *\p ptr
 * \return the previous value of bit \p bit
 */
inline char atomic_bittestandset_x86(volatile unsigned int* ptr, unsigned int bit) {
    char out;
#if defined(__x86_64)
    // x86_64: every general-purpose register is byte-addressable,
    // so the plain "r" constraint is fine for the char result.
    __asm__ __volatile__ (
        "lock; bts %2,%1\n" // set carry flag if bit %2 (bit) of %1 (ptr) is set
        // then set bit %2 of %1
        "sbb %0,%0\n" // set %0 (out) if carry flag is set
        : "=r" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#else
    // 32-bit x86: only a/b/c/d have byte sub-registers, hence "=q".
    __asm__ __volatile__ (
        "lock; bts %2,%1\n" // set carry flag if bit %2 (bit) of %1 (ptr) is set
        // then set bit %2 of %1
        "sbb %0,%0\n" // set %0 (out) if carry flag is set
        : "=q" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#endif
    return out;
}
| 289 | + |
/**
 * \brief Atomically tests and resets a bit (INTEL only)
 * \details Resets bit \p bit of *\p ptr and returns its previous value.
 *  The function is atomic and acts as a read-write memory barrier
 * \param[in] ptr a pointer to an unsigned integer
 * \param[in] bit index of the bit to reset in \p ptr
 * \return the previous value of bit \p bit
 */
inline char atomic_bittestandreset_x86(volatile unsigned int* ptr, unsigned int bit) {
    char out;
#if defined(__x86_64)
    // x86_64: "r" is sufficient for the byte-sized result (see
    // atomic_bittestandset_x86 for the 32-bit "=q" rationale).
    __asm__ __volatile__ (
        "lock; btr %2,%1\n" // set carry flag if bit %2 (bit) of %1 (ptr) is set
        // then reset bit %2 of %1
        "sbb %0,%0\n" // set %0 (out) if carry flag is set
        : "=r" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#else
    __asm__ __volatile__ (
        "lock; btr %2,%1\n" // set carry flag if bit %2 (bit) of %1 (ptr) is set
        // then reset bit %2 of %1
        "sbb %0,%0\n" // set %0 (out) if carry flag is set
        : "=q" (out), "=m" (*ptr)
        : "Ir" (bit)
        : "memory"
    );
#endif
    return out;
}
| 321 | + |
| 322 | +#elif defined(GEO_OS_APPLE) |
| 323 | + |
| 324 | +#include <libkern/OSAtomic.h> |
| 325 | + |
| 326 | +#elif defined(GEO_OS_WINDOWS) |
| 327 | + |
| 328 | +#include <windows.h> |
| 329 | +#include <intrin.h> |
| 330 | +#pragma intrinsic(_InterlockedCompareExchange8) |
| 331 | +#pragma intrinsic(_InterlockedCompareExchange16) |
| 332 | +#pragma intrinsic(_InterlockedCompareExchange) |
| 333 | +#pragma intrinsic(_interlockedbittestandset) |
| 334 | +#pragma intrinsic(_interlockedbittestandreset) |
| 335 | +#pragma intrinsic(_ReadBarrier) |
| 336 | +#pragma intrinsic(_WriteBarrier) |
| 337 | +#pragma intrinsic(_ReadWriteBarrier) |
| 338 | + |
| 339 | +# ifdef GEO_COMPILER_MINGW |
// No-op pause for the MinGW build (no pause intrinsic used here).
inline void geo_pause() {
}
| 342 | +# endif |
| 343 | + |
| 344 | +#endif // GEO_OS_WINDOWS |
| 345 | + |
| 346 | +#endif |
| 347 | + |
| 348 | + |
0 commit comments