Platform_unix.h

/*--------------------------------------------------------------------------------*
 Project: CrossRoad
 Copyright (C)Nintendo All rights reserved.

 These coded instructions, statements, and computer programs contain proprietary
 information of Nintendo and/or its licensed developers and are protected by
 national and international copyright laws. They may not be disclosed to third
 parties or copied or duplicated in any form, in whole or in part, without the
 prior written consent of Nintendo.

 The content herein is highly confidential and should be handled accordingly.
 *--------------------------------------------------------------------------------*/

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#ifdef __cplusplus
// http://cpprefjp.github.io/reference/cstdint.html
// Older libc may require __STDC_LIMIT_MACROS and __STDC_CONSTANT_MACROS defined.
#ifndef __STDC_LIMIT_MACROS
#warning __STDC_LIMIT_MACROS not defined, compile may fail
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#warning __STDC_CONSTANT_MACROS not defined, compile may fail
#define __STDC_CONSTANT_MACROS
#endif
#endif
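
// Example (illustrative sketch, not part of the original header): with an older
// libc, C++ translation units only get the limit/constant macros from <stdint.h>
// when these macros are set before the first include, which is why they are
// defined above.
//
//     #define __STDC_LIMIT_MACROS
//     #define __STDC_CONSTANT_MACROS
//     #include <stdint.h>
//     int32_t v = INT32_MAX;             /* limit macro */
//     int64_t k = INT64_C(0x100000000);  /* constant macro */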

#if defined(__APPLE__) && defined(__MACH__)
#define _DARWIN_UNLIMITED_SELECT
#include <libkern/OSAtomic.h>
#include <errno.h>
#if __has_include( <os/lock.h> )
#include <os/lock.h>
#endif
#endif

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_UNIX
# define NLIB_UNIX
#endif

// For now, only gcc and clang are supported.
#if !defined(__GNUC__) && !defined(__clang__)
# error
#endif

#if !defined(__FreeBSD__) && !defined(__APPLE__)
// checking __GNU_LIBRARY__, __GLIBC__, __GLIBC_MINOR__ to detect glibc
#include <features.h>
#endif
#include <pthread.h> // for PTHREAD_MUTEX_INITIALIZER, ....
#include <semaphore.h> // for sem_t
#include <sys/types.h> // for pthread_mutex_t, ....
#include <sys/uio.h> // struct iovec
#include <fcntl.h> // NOLINT
#include <sys/socket.h>
#include <netinet/tcp.h> // TCP_FASTOPEN
#include <poll.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <stdint.h>

#if defined(__FreeBSD__) || defined(__APPLE__)
#include <dispatch/dispatch.h>
#endif

#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif

#ifndef __CYGWIN__
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))

#ifdef __clang__
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif

#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif
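
// Example (illustrative sketch, not part of the original header): these macros
// decorate declarations so gcc/clang can warn and optimize. A hypothetical
// allocator might be declared and used like this:
//
//     NLIB_CHECK_RESULT NLIB_ATTRIBUTE_MALLOC NLIB_ATTRIBUTE_ALLOC_SIZE1(1)
//     void* my_alloc(size_t size);       /* result must not be ignored */
//
//     void* p = my_alloc(16);
//     if (NLIB_UNLIKELY(p == NULL))      /* hint: branch is rarely taken */
//         abort();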

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif
#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif

#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#  error Sorry
# endif
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#define NLIB_MEMORY_ORDER_SEQ_CST __sync_synchronize()
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
#endif
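
// Example (illustrative sketch, not part of the original header): a classic
// release/acquire handoff built from these fences. The writer publishes data
// before raising the flag; the reader sees the flag, then safely reads the data.
//
//     /* shared */ int g_data; volatile int g_flag = 0;
//
//     /* writer */                        /* reader */
//     g_data = 42;                        while (!g_flag) { /* spin */ }
//     NLIB_MEMORY_ORDER_RELEASE;          NLIB_MEMORY_ORDER_ACQUIRE;
//     g_flag = 1;                         use(g_data);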

typedef pthread_key_t nlib_tls;
#define NLIB_PTHREAD_nlib_tls_alloc
#define NLIB_PTHREAD_nlib_tls_free
#define NLIB_PTHREAD_nlib_tls_setvalue
#define NLIB_PTHREAD_nlib_tls_getvalue
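
// Note (illustrative, not part of the original header): the NLIB_PTHREAD_*
// markers indicate that the corresponding nlib functions map directly onto
// pthreads. Assuming that mapping, a TLS slot behaves like:
//
//     pthread_key_t key;                  /* == nlib_tls            */
//     pthread_key_create(&key, NULL);     /* nlib_tls_alloc         */
//     pthread_setspecific(key, ptr);      /* nlib_tls_setvalue      */
//     void* p = pthread_getspecific(key); /* nlib_tls_getvalue      */
//     pthread_key_delete(key);            /* nlib_tls_free          */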

#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_mutex_t nlib_mutex;

#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# ifdef __FreeBSD__
// https://stackoverflow.com/questions/10369606/constexpr-pointer-value
// PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP is ((pthread_mutex_t)1) on FreeBSD.
# define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
    PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
    PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
# define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_PTHREAD_nlib_mutex_init
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif

#ifndef __FreeBSD__
#define NLIB_PTHREAD_nlib_mutex_lock
#define NLIB_PTHREAD_nlib_mutex_unlock
#define NLIB_PTHREAD_nlib_mutex_trylock
#define NLIB_PTHREAD_nlib_mutex_destroy
#endif

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
// FreeBSD has no static initializer for recursive mutexes;
// nlib lazily initializes a recursive mutex when it still holds the sentinel value 255.
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
    (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
    (pthread_mutex_t)255 : (pthread_mutex_t)255)
#elif defined(NLIB_ALPINE)
// Hack: musl (Alpine) has no recursive-mutex static initializer either; this
// relies on the leading field of musl's pthread_mutex_t being the mutex type
// (1 = recursive).
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
#else
# error Sorry
#endif
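
// Example (illustrative sketch, not part of the original header): whichever
// branch above is taken, a statically initialized recursive mutex looks the
// same to client code:
//
//     static nlib_mutex g_lock = NLIB_RECURSIVE_MUTEX_INITIALIZER;
//     /* ... locked/unlocked via the nlib_mutex_* wrappers declared elsewhere */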

#if defined(__APPLE__)
typedef int nlib_semaphore;
#else
typedef sem_t nlib_semaphore;
#endif

typedef pthread_cond_t nlib_cond;
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER

#define NLIB_PTHREAD_nlib_cond_init
#define NLIB_PTHREAD_nlib_cond_signal
#define NLIB_PTHREAD_nlib_cond_broadcast
#define NLIB_PTHREAD_nlib_cond_wait
#define NLIB_PTHREAD_nlib_cond_destroy

typedef pthread_t nlib_thread;

#define NLIB_PTHREAD_nlib_thread_join
#define NLIB_PTHREAD_nlib_thread_detach
#define NLIB_PTHREAD_nlib_thread_equal
#define NLIB_PTHREAD_nlib_thread_self

#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#if __has_include( <os/lock.h> )
typedef os_unfair_lock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = OS_UNFAIR_LOCK_INIT;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    os_unfair_lock_lock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    os_unfair_lock_unlock(lock);
}
#else
typedef OSSpinLock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER (0)
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = 0;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    OSSpinLockLock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    OSSpinLockUnlock(lock);
}
#endif
#endif
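
// Example (illustrative sketch, not part of the original header): on Apple
// platforms the native lock defined above is used; other platforms get a
// generic nlib_spinlock implementation elsewhere (Platform.h).
//
//     static nlib_spinlock g_spin = NLIB_SPINLOCK_INITIALIZER;
//
//     nlib_spinlock_lock(&g_spin);
//     /* short critical section; recursive locking is undefined */
//     nlib_spinlock_unlock(&g_spin);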

#ifdef __cplusplus
}
#endif

#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
#else
// __GNUC__
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
#  define NLIB_CXX11_NOEXCEPT
# endif
// # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
// #  error Sorry, glibc is old.
// # endif
#endif

#define NLIB_ONCE_HAS_NATIVE
#define NLIB_TIMESPEC_HAS_NATIVE
#define NLIB_IOVEC_HAS_NATIVE

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_rwlock_t nlib_rwlock;
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER

#define NLIB_PTHREAD_nlib_rwlock_init
#define NLIB_PTHREAD_nlib_rwlock_destroy
#define NLIB_PTHREAD_nlib_rwlock_tryrdlock
#define NLIB_PTHREAD_nlib_rwlock_trywrlock
#define NLIB_PTHREAD_nlib_rwlock_rdlock
#define NLIB_PTHREAD_nlib_rwlock_rdunlock
#define NLIB_PTHREAD_nlib_rwlock_wrlock
#define NLIB_PTHREAD_nlib_rwlock_wrunlock
#endif

#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
typedef pthread_barrier_t nlib_barrier;
#define NLIB_PTHREAD_nlib_barrier_init
#define NLIB_PTHREAD_nlib_barrier_destroy
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif

#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif
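
// Note (illustrative, not part of the original header): pthread_cleanup_push()
// and pthread_cleanup_pop() must be macros because POSIX allows them to expand
// to unbalanced braces that have to open and close in the same scope:
//
//     pthread_cleanup_push(unlock_fn, &mtx); /* may expand to "{ ..."       */
//     do_work();                             /* cancellation point(s)       */
//     pthread_cleanup_pop(1);                /* "... }"; 1 runs unlock_fn   */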

#ifdef __linux__
// NOTE:
// An implementation based on timerfd_create() and epoll() consumes file descriptors,
// so differences in per-process descriptor limits might hurt portability.
// Also, its performance is no better than our generic version.
// #define NLIB_TIMER_HAS_NATIVE
#endif
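
// Sketch (an assumption for illustration, not nlib code): the rejected native
// approach would look roughly like this; every timer costs one timerfd
// descriptor, plus an epoll descriptor to wait on.
//
//     int tfd = timerfd_create(CLOCK_MONOTONIC, TFD_NONBLOCK | TFD_CLOEXEC);
//     struct itimerspec its = {{0, 0}, {1, 0}};  /* one-shot, fires in 1 sec */
//     timerfd_settime(tfd, 0, &its, NULL);
//     /* register tfd with epoll_ctl(EPOLL_CTL_ADD) and wait for expiration */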

#define NLIB_LIBC_nlib_memcmp
#define NLIB_LIBC_nlib_strlen
#define NLIB_LIBC_nlib_strnlen
#if defined(__STDC_LIB_EXT1__)
# define NLIB_LIBC_nlib_wcslen
# define NLIB_LIBC_nlib_wcsnlen
# define NLIB_LIBC_nlib_strncpy
# define NLIB_LIBC_nlib_strcpy
# define NLIB_LIBC_nlib_wcsncpy
# define NLIB_LIBC_nlib_wcscpy
#endif
#define NLIB_LIBC_nlib_strchr
#define NLIB_LIBC_nlib_strrchr

#ifdef __cplusplus
extern "C" {
#endif

#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#if defined(NLIB_DOXYGEN)
int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                   int32_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);

int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                   int64_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);

void* nlib_atomic_loadptr(void* const* ptr, int memorder);
void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder);
int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                    void* desired, int weak,
                                    int success_memorder, int failure_memorder);
void nlib_atomic_thread_fence(int memorder);
#endif

NLIB_VIS_PUBLIC extern pthread_mutex_t nlib_tsan_lock;
#if defined(__has_feature) && __has_feature(thread_sanitizer)
#define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
#define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
#else
#define NLIB_TSAN_LOCK
#define NLIB_TSAN_UNLOCK
#endif
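
// Note (illustrative, not part of the original header): when building with
// ThreadSanitizer, every nlib atomic below is bracketed by NLIB_TSAN_LOCK /
// NLIB_TSAN_UNLOCK, serializing the __atomic builtins through one global
// pthread mutex, presumably so the sanitizer observes a consistent
// happens-before relation instead of reporting false positives.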

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}
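
// Example (illustrative sketch, not part of the original header): a typical
// compare-and-swap retry loop built on the function above.
//
//     int32_t expected = nlib_atomic_load32(&counter, NLIB_ATOMIC_RELAXED);
//     while (!nlib_atomic_compare_exchange32(&counter, &expected, expected + 1,
//                                            1 /* weak */,
//                                            NLIB_ATOMIC_ACQ_REL,
//                                            NLIB_ATOMIC_RELAXED)) {
//         /* on failure, expected was refreshed with the current value; retry */
//     }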

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    __atomic_thread_fence(memorder);
}

#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7
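
// Note (illustrative, not part of the original header): these values are bit
// flags: bit 0 = acquire, bit 1 = release, so NLIB_ATOMIC_ACQ_REL is 1|2 and
// NLIB_ATOMIC_SEQ_CST (7) contains both bits. That is what makes tests like
// (memorder & NLIB_ATOMIC_ACQUIRE) in the fallback code below work.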

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = *(volatile int64_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile *)ptr;  // NOLINT
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    void* tmp = __sync_lock_test_and_set(ptr, val);
    (void)tmp;
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    switch (memorder) {
    case NLIB_ATOMIC_RELAXED:
        break;
    case NLIB_ATOMIC_ACQUIRE:
        NLIB_MEMORY_ORDER_ACQUIRE;
        break;
    case NLIB_ATOMIC_RELEASE:
        NLIB_MEMORY_ORDER_RELEASE;
        break;
    case NLIB_ATOMIC_ACQ_REL:
        NLIB_MEMORY_ORDER_ACQ_REL;
        break;
    default:
        NLIB_MEMORY_ORDER_SEQ_CST;
        break;
    }
}
#endif


#ifdef __cplusplus
}
#endif

#endif
#endif  // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_