nlib
Platform_unix.h
1 
2 #pragma once
3 #ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
4 #define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
5 #ifndef INCLUDE_NN_NLIB_PLATFORM_H_
6 # error do not include directly
7 #endif
8 
9 #if defined(__linux__) || \
10  defined(__FreeBSD__) || \
11  defined(__CYGWIN__) || \
12  (defined(__APPLE__) && defined(__MACH__))
13 
14 #if defined(__APPLE__) && defined(__MACH__)
15 #define _DARWIN_UNLIMITED_SELECT
16 #include <libkern/OSAtomic.h>
17 #include <errno.h>
18 #if __has_include( <os/lock.h> )
19 #include <os/lock.h>
20 #endif
21 #endif
22 
23 #ifdef __cplusplus
24 extern "C" {
25 #endif
26 
27 #ifndef NLIB_UNIX
28 # define NLIB_UNIX
29 #endif
30 
31 // For now, only gcc and clang are supported
32 #if !defined(__GNUC__) && !defined(__clang__)
33 # error
34 #endif
35 
36 #define NLIB_HAS_STDHEADER_STDINT
37 #define NLIB_HAS_STDHEADER_INTTYPES
38 
39 #if !defined(__FreeBSD__) && !defined(__APPLE__)
40 // checking __GNU_LIBRARY__, __GLIBC__, __GLIBC_MINOR__ to detect glibc
41 #include <features.h>
42 #endif
43 #include <pthread.h> // for PTHREAD_MUTEX_INITIALIZER, ....
44 #include <semaphore.h> // for sem_t
45 #include <sys/types.h> // for pthread_mutex_t, ....
46 #include <sys/uio.h> // struct iovec
47 #include <fcntl.h> // NOLINT
48 #include <sys/socket.h>
49 #include <poll.h>
50 #include <netinet/in.h>
51 #include <arpa/inet.h>
52 #include <netdb.h>
53 #include <stdint.h>
54 
55 #if defined(__i386__) || defined(__x86_64__)
56 # include <x86intrin.h>
57 #endif
58 
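// Cygwin emits PE/COFF objects, where ELF visibility attributes do not apply, so the
// visibility and weak-symbol macros are defined to nothing there.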
59 #ifndef __CYGWIN__
60 # define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
61 # define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
62 # define NLIB_WEAKSYMBOL __attribute__((weak))
63 #else
64 # define NLIB_VIS_HIDDEN
65 # define NLIB_VIS_PUBLIC
66 # define NLIB_WEAKSYMBOL
67 #endif
68 
69 #define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
70 #define NLIB_NEVER_INLINE __attribute__((__noinline__))
71 #define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
72 #define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
73 #define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
74 #define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
75 #define NLIB_NORETURN __attribute__((noreturn))
76 #define NLIB_NONNULL __attribute__((nonnull))
77 #define NLIB_NONNULL_1 __attribute__((nonnull (1)))
78 #define NLIB_NONNULL_2 __attribute__((nonnull (2)))
79 #define NLIB_NONNULL_3 __attribute__((nonnull (3)))
80 #define NLIB_NONNULL_4 __attribute__((nonnull (4)))
81 #define NLIB_NONNULL_5 __attribute__((nonnull (5)))
82 #define NLIB_NONNULL_ENABLED
83 #define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
84 #define NLIB_ATTRIBUTE_PURE __attribute__((pure))
85 #define NLIB_ATTRIBUTE_CONST __attribute__((const))
86 
87 #ifdef __clang__
88 # if __has_attribute(alloc_size)
89 # define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
90 # define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
91 # else
92 # define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
93 # define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
94 # endif
95 # if __has_attribute(alloc_align)
96 # define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
97 # else
98 # define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
99 # endif
100 # if __has_attribute(assume_aligned)
101 # define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
102 # else
103 # define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
104 # endif
105 #else
106 # define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
107 # define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
108 # if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
109 # define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
110 # define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
111 # else
112 # define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
113 # define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
114 # endif
115 #endif
116 
117 #ifndef NLIB_DEPRECATED
118 #define NLIB_DEPRECATED __attribute__((deprecated))
119 #endif
120 #ifndef NLIB_DEPRECATED_MSG
121 #define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
122 #endif
123 
124 #if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
125 # define NLIB_LITTLE_ENDIAN
126 #elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
127 # undef NLIB_LITTLE_ENDIAN
128 #else
129 # if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
130 # define NLIB_LITTLE_ENDIAN
131 # else
132 # undef NLIB_LITTLE_ENDIAN
133 # endif
134 #endif
135 #if defined(__x86_64__) || defined(__aarch64__)
136 # define NLIB_64BIT
137 #endif
138 
139 #if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
140 # if !defined(__i386__) && !defined(__x86_64__)
141 # error Sorry
142 # endif
143 #define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
144 #define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
145 #define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
146 #define NLIB_MEMORY_ORDER_SEQ_CST __sync_synchronize()
147 #else
148 #define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
149 #define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
150 #define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
151 #define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
152 #endif
153 
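// The synchronization primitives below map directly onto pthreads. The
// NLIB_PTHREAD_nlib_* markers presumably tell the common nlib sources that the
// corresponding nlib_* function is backed by the matching pthread call on this
// platform, so no separate wrapper is required.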
154 typedef pthread_key_t nlib_tls;
155 #define NLIB_PTHREAD_nlib_tls_alloc
156 #define NLIB_PTHREAD_nlib_tls_free
157 #define NLIB_PTHREAD_nlib_tls_setvalue
158 #define NLIB_PTHREAD_nlib_tls_getvalue
159 
160 #ifndef _LIBCPP_VERSION
161 NLIB_CAPABILITY("mutex")
162 #endif
163 typedef pthread_mutex_t nlib_mutex;
164 
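// When glibc provides PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP, statically initialized
// nlib mutexes default to the adaptive (spin briefly before sleeping) flavor;
// otherwise the standard PTHREAD_MUTEX_INITIALIZER is used.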
165 #ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
166 # ifdef __FreeBSD__
167 // https://stackoverflow.com/questions/10369606/constexpr-pointer-value
168 // PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP is ((pthread_mutex_t)1) on FreeBSD
169 # define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
170  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
171  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
172 # else
173 # define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
174 # endif
175 #else
176 # define NLIB_PTHREAD_nlib_mutex_init
177 # define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
178 #endif
179 
180 #ifndef __FreeBSD__
181 #define NLIB_PTHREAD_nlib_mutex_lock
182 #define NLIB_PTHREAD_nlib_mutex_unlock
183 #define NLIB_PTHREAD_nlib_mutex_trylock
184 #define NLIB_PTHREAD_nlib_mutex_destroy
185 #endif
186 
187 #if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
188 # define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
189 # define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
190 #elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
191 # define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
192 # define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
193 #elif defined(__FreeBSD__)
194 // FreeBSD does not provide a static initializer for recursive mutexes;
195 // nlib initializes the recursive mutex at first use when its value is 255
196 # define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
197  (pthread_mutex_t)255 : (pthread_mutex_t)255)
198 # define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
199  (pthread_mutex_t)255 : (pthread_mutex_t)255)
200 #elif defined(NLIB_ALPINE)
201 // hacked...
202 #define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
203 #define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
204 #else
205 # error Sorry
206 #endif
207 
208 #if defined(__APPLE__)
209 typedef int nlib_semaphore;
210 #else
211 typedef sem_t nlib_semaphore;
212 #endif
213 
214 typedef pthread_cond_t nlib_cond;
215 #define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER
216 
217 #define NLIB_PTHREAD_nlib_cond_init
218 #define NLIB_PTHREAD_nlib_cond_signal
219 #define NLIB_PTHREAD_nlib_cond_broadcast
220 #define NLIB_PTHREAD_nlib_cond_wait
221 #define NLIB_PTHREAD_nlib_cond_destroy
222 
223 typedef pthread_t nlib_thread;
224 
225 #define NLIB_PTHREAD_nlib_thread_join
226 #define NLIB_PTHREAD_nlib_thread_detach
227 #define NLIB_PTHREAD_nlib_thread_equal
228 #define NLIB_PTHREAD_nlib_thread_self
229 
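// Darwin has a native spinlock. The os_unfair_lock path below is currently disabled
// by the leading '#if 0', so the OSSpinLock API from <libkern/OSAtomic.h> (deprecated
// since macOS 10.12 in favor of os_unfair_lock) is what actually gets used.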
230 #if defined(__APPLE__)
231 #define NLIB_SPINLOCK_HAS_NATIVE
232 #if 0 && __has_include( <os/lock.h> ) && (MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_12)
233 typedef os_unfair_lock nlib_spinlock;
234 #define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
235 static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
236  *lock = OS_UNFAIR_LOCK_INIT;
237 }
238 static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
239  os_unfair_lock_lock(lock);
240 }
241 static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
242  return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
243 }
244 static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
245  os_unfair_lock_unlock(lock);
246 }
247 #else
248 typedef OSSpinLock nlib_spinlock;
249 #define NLIB_SPINLOCK_INITIALIZER (0)
250 static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
251  *lock = 0;
252 }
253 static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
254  OSSpinLockLock(lock);
255 }
256 static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
257  return OSSpinLockTry(lock) ? 0 : EBUSY;
258 }
259 static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
260  OSSpinLockUnlock(lock);
261 }
262 #endif
263 #endif
264 
265 #ifdef __cplusplus
266 }
267 #endif
268 
269 #if defined(__clang__)
270 # if __has_feature(cxx_unicode_literals)
271 # define NLIB_CXX11_NEW_CHARACTER_TYPES
272 # endif
273 # if __has_feature(cxx_exceptions)
274 # if __has_feature(cxx_noexcept)
275 # define NLIB_CXX11_NOEXCEPT
276 # endif
277 # else
278 # define NLIB_NOEXCEPT
279 # endif
280 #else
281 // __GNUC__
282 # if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
283 # define NLIB_CXX11_NEW_CHARACTER_TYPES
284 # endif
285 # if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
286 # define NLIB_CXX11_NOEXCEPT
287 # endif
288 // # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
289 // # error Sorry, glibc is old.
290 // # endif
291 #endif
292 
293 #define NLIB_TIMESPEC_HAS_NATIVE
294 #define NLIB_IOVEC_HAS_NATIVE
295 
296 #ifdef PTHREAD_RWLOCK_INITIALIZER
297 # define NLIB_RWLOCK_HAS_NATIVE
298 #endif
299 #ifdef NLIB_RWLOCK_HAS_NATIVE
300 #ifndef _LIBCPP_VERSION
301 NLIB_CAPABILITY("mutex")
302 #endif
303 typedef pthread_rwlock_t nlib_rwlock;
304 #define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER
305 
306 #define NLIB_PTHREAD_nlib_rwlock_init
307 #define NLIB_PTHREAD_nlib_rwlock_destroy
308 #define NLIB_PTHREAD_nlib_rwlock_tryrdlock
309 #define NLIB_PTHREAD_nlib_rwlock_trywrlock
310 #define NLIB_PTHREAD_nlib_rwlock_rdlock
311 #define NLIB_PTHREAD_nlib_rwlock_rdunlock
312 #define NLIB_PTHREAD_nlib_rwlock_wrlock
313 #define NLIB_PTHREAD_nlib_rwlock_wrunlock
314 #endif
315 
316 #ifdef PTHREAD_BARRIER_SERIAL_THREAD
317 # define NLIB_BARRIER_HAS_NATIVE
318 #endif
319 #ifdef NLIB_BARRIER_HAS_NATIVE
320 typedef pthread_barrier_t nlib_barrier;
321 #define NLIB_PTHREAD_nlib_barrier_init
322 #define NLIB_PTHREAD_nlib_barrier_destroy
323 #endif
324 
325 #define NLIB_THREAD_ATTR_HAS_NATIVE
326 
327 #ifndef pthread_cleanup_push
328 # error pthread_cleanup_push must be a macro
329 #endif
330 
331 #ifndef pthread_cleanup_pop
332 # error pthread_cleanup_pop must be a macro
333 #endif
334 
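// The NLIB_LIBC_nlib_* markers presumably indicate that the named nlib string helper
// is backed by the corresponding libc routine; the copy and wide-character variants
// are only claimed when Annex K (__STDC_LIB_EXT1__) is available.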
335 #define NLIB_LIBC_nlib_memcmp
336 #define NLIB_LIBC_nlib_strlen
337 #define NLIB_LIBC_nlib_strnlen
338 #if defined(__STDC_LIB_EXT1__)
339 # define NLIB_LIBC_nlib_wcslen
340 # define NLIB_LIBC_nlib_wcsnlen
341 # define NLIB_LIBC_nlib_strncpy
342 # define NLIB_LIBC_nlib_strcpy
343 # define NLIB_LIBC_nlib_wcsncpy
344 # define NLIB_LIBC_nlib_wcscpy
345 #endif
346 #define NLIB_LIBC_nlib_strchr
347 #define NLIB_LIBC_nlib_strrchr
348 
349 #ifdef __cplusplus
350 extern "C" {
351 #endif
352 
353 #if (defined(__clang__) && defined(NLIB_64BIT)) || \
354  (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))
355 
356 #define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
357 #define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
358 #define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
359 #define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
360 #define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST
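// Illustrative sketch only (g_ptr and obj are placeholder names, not nlib symbols):
// publishing a pointer with release/acquire ordering via the atomics declared below.
//   nlib_atomic_storeptr(&g_ptr, obj, NLIB_ATOMIC_RELEASE);        /* writer */
//   void* p = nlib_atomic_loadptr(&g_ptr, NLIB_ATOMIC_ACQUIRE);    /* reader */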
361 
362 #if defined(NLIB_DOXYGEN)
363 int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
364 void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
365 int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
366 int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
367  int32_t desired, int weak,
368  int success_memorder, int failure_memorder);
369 int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
370 int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
371 int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
372 int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
373 int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
374 int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
375 int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
376 int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
377 int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
378 int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);
379 
380 int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
381 void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
382 int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
383 int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
384  int64_t desired, int weak,
385  int success_memorder, int failure_memorder);
386 int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
387 int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
388 int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
389 int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
390 int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
391 int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
392 int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
393 int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
394 int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
395 int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);
396 
397 void* nlib_atomic_loadptr(void* const* ptr, int memorder);
398 void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
399 int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
400  void* desired, int weak,
401  int success_memorder, int failure_memorder);
402 void nlib_atomic_thread_fence(int memorder);
403 #endif
404 
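// Under ThreadSanitizer, every nlib atomic below additionally takes the global
// nlib_tsan_lock mutex around the operation; otherwise NLIB_TSAN_LOCK and
// NLIB_TSAN_UNLOCK expand to nothing.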
405 NLIB_VIS_PUBLIC extern pthread_mutex_t nlib_tsan_lock;
406 #if defined(__has_feature) && __has_feature(thread_sanitizer)
407 #define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
408 #define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
409 #else
410 #define NLIB_TSAN_LOCK
411 #define NLIB_TSAN_UNLOCK
412 #endif
413 
414 static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
415  int32_t rval;
416  NLIB_TSAN_LOCK
417  rval = __atomic_load_n(ptr, memorder);
418  NLIB_TSAN_UNLOCK
419  return rval;
420 }
421 
422 static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
423  NLIB_TSAN_LOCK
424  __atomic_store_n(ptr, val, memorder);
425  NLIB_TSAN_UNLOCK
426 }
427 
428 static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
429  int memorder) {
430  int32_t rval;
431  NLIB_TSAN_LOCK
432  rval = __atomic_exchange_n(ptr, val, memorder);
433  NLIB_TSAN_UNLOCK
434  return rval;
435 }
436 
437 static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
438  int32_t desired, int weak,
439  int success_memorder, int failure_memorder) {
440  int32_t rval;
441  NLIB_TSAN_LOCK
442  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
443  success_memorder, failure_memorder);
444  NLIB_TSAN_UNLOCK
445  return rval;
446 }
447 
448 static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
449  int memorder) {
450  int32_t rval;
451  NLIB_TSAN_LOCK
452  rval = __atomic_add_fetch(ptr, val, memorder);
453  NLIB_TSAN_UNLOCK
454  return rval;
455 }
456 
457 static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
458  int memorder) {
459  int32_t rval;
460  NLIB_TSAN_LOCK
461  rval = __atomic_sub_fetch(ptr, val, memorder);
462  NLIB_TSAN_UNLOCK
463  return rval;
464 }
465 
466 static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
467  int memorder) {
468  int32_t rval;
469  NLIB_TSAN_LOCK
470  rval = __atomic_and_fetch(ptr, val, memorder);
471  NLIB_TSAN_UNLOCK
472  return rval;
473 }
474 
475 static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
476  int memorder) {
477  int32_t rval;
478  NLIB_TSAN_LOCK
479  rval = __atomic_xor_fetch(ptr, val, memorder);
480  NLIB_TSAN_UNLOCK
481  return rval;
482 }
483 
484 static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
485  int memorder) {
486  int32_t rval;
487  NLIB_TSAN_LOCK
488  rval = __atomic_or_fetch(ptr, val, memorder);
489  NLIB_TSAN_UNLOCK
490  return rval;
491 }
492 
493 static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
494  int memorder) {
495  int32_t rval;
496  NLIB_TSAN_LOCK
497  rval = __atomic_fetch_add(ptr, val, memorder);
498  NLIB_TSAN_UNLOCK
499  return rval;
500 }
501 
502 static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
503  int memorder) {
504  int32_t rval;
505  NLIB_TSAN_LOCK
506  rval = __atomic_fetch_sub(ptr, val, memorder);
507  NLIB_TSAN_UNLOCK
508  return rval;
509 }
510 
511 static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
512  int memorder) {
513  int32_t rval;
514  NLIB_TSAN_LOCK
515  rval = __atomic_fetch_and(ptr, val, memorder);
516  NLIB_TSAN_UNLOCK
517  return rval;
518 }
519 
520 static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
521  int memorder) {
522  int32_t rval;
523  NLIB_TSAN_LOCK
524  rval = __atomic_fetch_xor(ptr, val, memorder);
525  NLIB_TSAN_UNLOCK
526  return rval;
527 }
528 
529 static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
530  int memorder) {
531  int32_t rval;
532  NLIB_TSAN_LOCK
533  rval = __atomic_fetch_or(ptr, val, memorder);
534  NLIB_TSAN_UNLOCK
535  return rval;
536 }
537 
538 static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
539  int64_t rval;
540  NLIB_TSAN_LOCK
541  rval = __atomic_load_n(ptr, memorder);
542  NLIB_TSAN_UNLOCK
543  return rval;
544 }
545 
546 static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
547  NLIB_TSAN_LOCK
548  __atomic_store_n(ptr, val, memorder);
549  NLIB_TSAN_UNLOCK
550 }
551 
552 static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
553  int memorder) {
554  int64_t rval;
555  NLIB_TSAN_LOCK
556  rval = __atomic_exchange_n(ptr, val, memorder);
557  NLIB_TSAN_UNLOCK
558  return rval;
559 }
560 
561 static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
562  int64_t desired, int weak,
563  int success_memorder, int failure_memorder) {
564  int64_t rval;
565  NLIB_TSAN_LOCK
566  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
567  success_memorder, failure_memorder);
568  NLIB_TSAN_UNLOCK
569  return rval;
570 }
571 
572 static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
573  int memorder) {
574  int64_t rval;
575  NLIB_TSAN_LOCK
576  rval = __atomic_add_fetch(ptr, val, memorder);
577  NLIB_TSAN_UNLOCK
578  return rval;
579 }
580 
581 static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
582  int memorder) {
583  int64_t rval;
584  NLIB_TSAN_LOCK
585  rval = __atomic_sub_fetch(ptr, val, memorder);
586  NLIB_TSAN_UNLOCK
587  return rval;
588 }
589 
590 static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
591  int memorder) {
592  int64_t rval;
593  NLIB_TSAN_LOCK
594  rval = __atomic_and_fetch(ptr, val, memorder);
595  NLIB_TSAN_UNLOCK
596  return rval;
597 }
598 
599 static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
600  int memorder) {
601  int64_t rval;
602  NLIB_TSAN_LOCK
603  rval = __atomic_xor_fetch(ptr, val, memorder);
604  NLIB_TSAN_UNLOCK
605  return rval;
606 }
607 
608 static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
609  int memorder) {
610  int64_t rval;
611  NLIB_TSAN_LOCK
612  rval = __atomic_or_fetch(ptr, val, memorder);
613  NLIB_TSAN_UNLOCK
614  return rval;
615 }
616 
617 static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
618  int memorder) {
619  int64_t rval;
620  NLIB_TSAN_LOCK
621  rval = __atomic_fetch_add(ptr, val, memorder);
622  NLIB_TSAN_UNLOCK
623  return rval;
624 }
625 
626 static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
627  int memorder) {
628  int64_t rval;
629  NLIB_TSAN_LOCK
630  rval = __atomic_fetch_sub(ptr, val, memorder);
631  NLIB_TSAN_UNLOCK
632  return rval;
633 }
634 
635 static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
636  int memorder) {
637  int64_t rval;
638  NLIB_TSAN_LOCK
639  rval = __atomic_fetch_and(ptr, val, memorder);
640  NLIB_TSAN_UNLOCK
641  return rval;
642 }
643 
644 static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
645  int memorder) {
646  int64_t rval;
647  NLIB_TSAN_LOCK
648  rval = __atomic_fetch_xor(ptr, val, memorder);
649  NLIB_TSAN_UNLOCK
650  return rval;
651 }
652 
653 static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
654  int memorder) {
655  int64_t rval;
656  NLIB_TSAN_LOCK
657  rval = __atomic_fetch_or(ptr, val, memorder);
658  NLIB_TSAN_UNLOCK
659  return rval;
660 }
661 
662 static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
663  void* rval;
664  NLIB_TSAN_LOCK
665  rval = __atomic_load_n(ptr, memorder);
666  NLIB_TSAN_UNLOCK
667  return rval;
668 }
669 
670 static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
671  NLIB_TSAN_LOCK
672  __atomic_store_n(ptr, val, memorder);
673  NLIB_TSAN_UNLOCK
674 }
675 
676 static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
677  void* desired, int weak,
678  int success_memorder, int failure_memorder) {
679  int rval;
680  NLIB_TSAN_LOCK
681  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
682  success_memorder, failure_memorder);
683  NLIB_TSAN_UNLOCK
684  return rval;
685 }
686 
687 static __inline void nlib_atomic_thread_fence(int memorder) {
688  __atomic_thread_fence(memorder);
689 }
690 
691 #else
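// Fallback for toolchains without the __atomic builtins (e.g. gcc older than 4.7).
// The memory-order constants are bit flags, so tests such as
// (memorder & NLIB_ATOMIC_ACQUIRE) work, and the operations are built on the legacy
// __sync builtins plus the NLIB_MEMORY_ORDER_* fences defined earlier.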
692 #define NLIB_ATOMIC_RELAXED 0
693 #define NLIB_ATOMIC_ACQUIRE 1
694 #define NLIB_ATOMIC_RELEASE 2
695 #define NLIB_ATOMIC_ACQ_REL 3
696 #define NLIB_ATOMIC_SEQ_CST 7
697 
698 static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
699  int32_t rval = *(volatile int32_t*)ptr;
700  (void)memorder;
701 #if !defined(__i386__) && !defined(__x86_64__)
702  if (memorder & NLIB_ATOMIC_ACQUIRE)
703  NLIB_MEMORY_ORDER_ACQUIRE;
704 #endif
705  return rval;
706 }
707 
708 static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
709  int memorder) {
710  if (memorder == NLIB_ATOMIC_SEQ_CST)
711  __sync_synchronize();
712  else if (memorder & NLIB_ATOMIC_RELEASE)
713  NLIB_MEMORY_ORDER_RELEASE;
714  __sync_lock_test_and_set(ptr, val);
715 }
716 
717 static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
718  int memorder) {
719  if (memorder == NLIB_ATOMIC_SEQ_CST)
720  __sync_synchronize();
721  else if (memorder & NLIB_ATOMIC_RELEASE)
722  NLIB_MEMORY_ORDER_RELEASE;
723  return __sync_lock_test_and_set(ptr, val);
724 }
725 
726 static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
727  int32_t desired, int weak,
728  int success_memorder, int failure_memorder) {
729  int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
730  if (old == *expected) return 1;
731  *expected = old;
732 
733  (void)weak;
734  (void)success_memorder;
735  (void)failure_memorder;
736  return 0;
737 }
738 
739 static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
740  int memorder) {
741  (void)memorder;
742  return __sync_add_and_fetch(ptr, val);
743 }
744 
745 static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
746  int memorder) {
747  (void)memorder;
748  return __sync_sub_and_fetch(ptr, val);
749 }
750 
751 static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
752  int memorder) {
753  (void)memorder;
754  return __sync_and_and_fetch(ptr, val);
755 }
756 
757 static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
758  int memorder) {
759  (void)memorder;
760  return __sync_xor_and_fetch(ptr, val);
761 }
762 
763 static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
764  int memorder) {
765  (void)memorder;
766  return __sync_or_and_fetch(ptr, val);
767 }
768 
769 static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
770  int memorder) {
771  (void)memorder;
772  return __sync_fetch_and_add(ptr, val);
773 }
774 
775 static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
776  int memorder) {
777  (void)memorder;
778  return __sync_fetch_and_sub(ptr, val);
779 }
780 
781 static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
782  int memorder) {
783  (void)memorder;
784  return __sync_fetch_and_and(ptr, val);
785 }
786 
787 static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
788  int memorder) {
789  (void)memorder;
790  return __sync_fetch_and_xor(ptr, val);
791 }
792 
793 static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
794  int memorder) {
795  (void)memorder;
796  return __sync_fetch_and_or(ptr, val);
797 }
798 
799 static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
800  int64_t rval = *(volatile int64_t*)ptr;
801  (void)memorder;
802 #if !defined(__i386__) && !defined(__x86_64__)
803  if (memorder & NLIB_ATOMIC_ACQUIRE)
804  NLIB_MEMORY_ORDER_ACQUIRE;
805 #endif
806  return rval;
807 }
808 
809 static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
810  int memorder) {
811  if (memorder == NLIB_ATOMIC_SEQ_CST)
812  __sync_synchronize();
813  else if (memorder & NLIB_ATOMIC_RELEASE)
814  NLIB_MEMORY_ORDER_RELEASE;
815  __sync_lock_test_and_set(ptr, val);
816 }
817 
818 static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
819  int memorder) {
820  if (memorder == NLIB_ATOMIC_SEQ_CST)
821  __sync_synchronize();
822  else if (memorder & NLIB_ATOMIC_RELEASE)
823  NLIB_MEMORY_ORDER_RELEASE;
824  return __sync_lock_test_and_set(ptr, val);
825 }
826 
827 static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
828  int64_t desired, int weak,
829  int success_memorder, int failure_memorder) {
830  int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
831  if (old == *expected) return 1;
832  *expected = old;
833 
834  (void)weak;
835  (void)success_memorder;
836  (void)failure_memorder;
837  return 0;
838 }
839 
840 static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
841  int memorder) {
842  (void)memorder;
843  return __sync_add_and_fetch(ptr, val);
844 }
845 
846 static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
847  int memorder) {
848  (void)memorder;
849  return __sync_sub_and_fetch(ptr, val);
850 }
851 
852 static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
853  int memorder) {
854  (void)memorder;
855  return __sync_and_and_fetch(ptr, val);
856 }
857 
858 static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
859  int memorder) {
860  (void)memorder;
861  return __sync_xor_and_fetch(ptr, val);
862 }
863 
864 static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
865  int memorder) {
866  (void)memorder;
867  return __sync_or_and_fetch(ptr, val);
868 }
869 
870 static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
871  int memorder) {
872  (void)memorder;
873  return __sync_fetch_and_add(ptr, val);
874 }
875 
876 static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
877  int memorder) {
878  (void)memorder;
879  return __sync_fetch_and_sub(ptr, val);
880 }
881 
882 static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
883  int memorder) {
884  (void)memorder;
885  return __sync_fetch_and_and(ptr, val);
886 }
887 
888 static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
889  int memorder) {
890  (void)memorder;
891  return __sync_fetch_and_xor(ptr, val);
892 }
893 
894 static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
895  int memorder) {
896  (void)memorder;
897  return __sync_fetch_and_or(ptr, val);
898 }
899 
900 static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
901  void* rval = *(void* volatile *)ptr; // NOLINT
902  (void)memorder;
903 #if !defined(__i386__) && !defined(__x86_64__)
904  if (memorder & NLIB_ATOMIC_ACQUIRE)
905  NLIB_MEMORY_ORDER_ACQUIRE;
906 #endif
907  return rval;
908 }
909 
910 static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
911  if (memorder == NLIB_ATOMIC_SEQ_CST)
912  __sync_synchronize();
913  else if (memorder & NLIB_ATOMIC_RELEASE)
914  NLIB_MEMORY_ORDER_RELEASE;
915  void* tmp = __sync_lock_test_and_set(ptr, val);
916  (void)tmp;
917 }
918 
919 static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
920  void* desired, int weak,
921  int success_memorder, int failure_memorder) {
922  void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
923  if (old == *expected) return 1;
924  *expected = old;
925 
926  (void)weak;
927  (void)success_memorder;
928  (void)failure_memorder;
929  return 0;
930 }
931 
932 static __inline void nlib_atomic_thread_fence(int memorder) {
933  switch (memorder) {
934  case NLIB_ATOMIC_RELAXED:
935  break;
936  case NLIB_ATOMIC_ACQUIRE:
937  NLIB_MEMORY_ORDER_ACQUIRE;
938  break;
939  case NLIB_ATOMIC_RELEASE:
940  NLIB_MEMORY_ORDER_RELEASE;
941  break;
942  case NLIB_ATOMIC_ACQ_REL:
943  NLIB_MEMORY_ORDER_ACQ_REL;
944  break;
945  default:
946  NLIB_MEMORY_ORDER_SEQ_CST;
947  break;
948  }
949 }
950 #endif
951 
952 
953 #ifdef __cplusplus
954 }
955 #endif
956 
957 #endif
958 #endif // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_