Platform_unix.h

/*--------------------------------------------------------------------------------*
  Project: CrossRoad
  Copyright (C) Nintendo. All rights reserved.

  These coded instructions, statements, and computer programs contain proprietary
  information of Nintendo and/or its licensed developers and are protected by
  national and international copyright laws. They may not be disclosed to third
  parties or copied or duplicated in any form, in whole or in part, without the
  prior written consent of Nintendo.

  The content herein is highly confidential and should be handled accordingly.
 *--------------------------------------------------------------------------------*/

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#ifdef __cplusplus
// http://cpprefjp.github.io/reference/cstdint.html
// Older libc versions may require __STDC_LIMIT_MACROS and __STDC_CONSTANT_MACROS
// to be defined before <stdint.h> is included.
#ifndef __STDC_LIMIT_MACROS
#warning __STDC_LIMIT_MACROS not defined, compile may fail
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#warning __STDC_CONSTANT_MACROS not defined, compile may fail
#define __STDC_CONSTANT_MACROS
#endif
#endif
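
// Usage sketch (illustrative, not part of the original header): to avoid the
// #warning above when compiling C++ against an old libc, define both macros
// before anything includes <stdint.h>, e.g. on the command line:
//   g++ -D__STDC_LIMIT_MACROS -D__STDC_CONSTANT_MACROS -c foo.cpp
// after which INT32_MAX, INT64_C(...) and friends are visible.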

#if defined(__APPLE__) && defined(__MACH__)
#define _DARWIN_UNLIMITED_SELECT
#include <libkern/OSAtomic.h>
#include <errno.h>
#if __has_include( <os/lock.h> )
#include <os/lock.h>
#endif
#endif

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_UNIX
# define NLIB_UNIX
#endif

// For now, only gcc and clang are supported.
#if !defined(__GNUC__) && !defined(__clang__)
# error "nlib requires gcc or clang on UNIX-like platforms"
#endif

#if !defined(__FreeBSD__) && !defined(__APPLE__)
// checking __GNU_LIBRARY__, __GLIBC__, __GLIBC_MINOR__ to detect glibc
#include <features.h>
#endif
#include <pthread.h>     // for PTHREAD_MUTEX_INITIALIZER, ....
#include <semaphore.h>   // for sem_t
#include <sys/types.h>   // for pthread_mutex_t, ....
#include <sys/uio.h>     // struct iovec
#include <fcntl.h>       // NOLINT
#include <sys/socket.h>
#include <netinet/tcp.h> // TCP_FASTOPEN
#include <poll.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <stdint.h>

#if defined(__FreeBSD__) || defined(__APPLE__)
#include <dispatch/dispatch.h>
#endif

#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif

#ifndef __CYGWIN__
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
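// Illustrative use of the branch hints above (hypothetical function, not part
// of this header):
//   if (NLIB_UNLIKELY(buf == NULL))
//       return EINVAL;  // marks the error path as cold for the optimizer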
#if defined(__cplusplus) && __has_cpp_attribute(nodiscard)
#define NLIB_CHECK_RESULT [[nodiscard]] // NOLINT
#else
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#endif
#if defined(__cplusplus) && __has_cpp_attribute(noreturn)
#define NLIB_NORETURN [[noreturn]] // NOLINT
#else
#define NLIB_NORETURN __attribute__((noreturn))
#endif
#if defined(__cplusplus) && __has_cpp_attribute(fallthrough)
#define NLIB_FALLTHROUGH [[fallthrough]] // NOLINT
#else
#define NLIB_FALLTHROUGH
#endif
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))

#ifdef __clang__
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif
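
// Example declaration combining the attributes above (hypothetical allocator,
// shown only to illustrate intended use):
//   NLIB_CHECK_RESULT NLIB_ATTRIBUTE_MALLOC NLIB_ATTRIBUTE_ALLOC_SIZE1(1)
//   void* my_alloc(size_t size);
// alloc_size(1) tells the optimizer that the result points to 'size' bytes.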

#ifndef NLIB_DEPRECATED
#if defined(__cplusplus) && __has_cpp_attribute(deprecated)
#define NLIB_DEPRECATED [[deprecated]] // NOLINT
#else
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#endif
#ifndef NLIB_DEPRECATED_MSG
#if defined(__cplusplus) && __has_cpp_attribute(deprecated)
#define NLIB_DEPRECATED_MSG(msg) [[deprecated(msg)]] // NOLINT
#else
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif
#endif

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif
#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif

#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)

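// Illustrative pairing (sketch only; 'ready' is a hypothetical flag): a writer
// publishes with a release fence before setting the flag,
//   NLIB_MEMORY_ORDER_RELEASE; ready = 1;
// and a reader issues NLIB_MEMORY_ORDER_ACQUIRE after observing ready != 0, so
// writes made before the release fence become visible afterwards.
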
typedef pthread_key_t nlib_tls;
#define NLIB_PTHREAD_nlib_tls_alloc
#define NLIB_PTHREAD_nlib_tls_free
#define NLIB_PTHREAD_nlib_tls_setvalue
#define NLIB_PTHREAD_nlib_tls_getvalue

#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_mutex_t nlib_mutex;

#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# ifdef __FreeBSD__
// https://stackoverflow.com/questions/10369606/constexpr-pointer-value
// On FreeBSD the initializer expands to ((pthread_mutex_t)1); the ternary below
// forces it to be treated as a constant expression.
# define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                 PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                 PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
# define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_PTHREAD_nlib_mutex_init
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif

#ifndef __FreeBSD__
#define NLIB_PTHREAD_nlib_mutex_lock
#define NLIB_PTHREAD_nlib_mutex_unlock
#define NLIB_PTHREAD_nlib_mutex_trylock
#define NLIB_PTHREAD_nlib_mutex_destroy
#endif

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
// FreeBSD has no static initializer for recursive mutexes;
// nlib initializes the mutex lazily when its value is 255.
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#elif defined(NLIB_ALPINE)
// hacked...
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
#else
# error "no recursive mutex initializer is available on this platform"
#endif
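
// Usage sketch (illustrative): a statically initialized recursive mutex.
//   static nlib_mutex g_lock = NLIB_RECURSIVE_MUTEX_INITIALIZER;
// On FreeBSD the sentinel value 255 above is recognized and the mutex is set
// up on first use, as noted in the comment.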

#if defined(__APPLE__)
typedef int nlib_semaphore;
#else
typedef sem_t nlib_semaphore;
#endif

typedef pthread_cond_t nlib_cond;
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER

#define NLIB_PTHREAD_nlib_cond_init
#define NLIB_PTHREAD_nlib_cond_signal
#define NLIB_PTHREAD_nlib_cond_broadcast
#define NLIB_PTHREAD_nlib_cond_wait
#define NLIB_PTHREAD_nlib_cond_destroy

typedef pthread_t nlib_thread;

#define NLIB_PTHREAD_nlib_thread_join
#define NLIB_PTHREAD_nlib_thread_detach
#define NLIB_PTHREAD_nlib_thread_equal
#define NLIB_PTHREAD_nlib_thread_self

#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#if __has_include( <os/lock.h> )
typedef os_unfair_lock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = OS_UNFAIR_LOCK_INIT;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    os_unfair_lock_lock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    os_unfair_lock_unlock(lock);
}
#else
typedef OSSpinLock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER (0)
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = 0;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    OSSpinLockLock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    OSSpinLockUnlock(lock);
}
#endif
#endif
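
// Usage sketch (illustrative only):
//   static nlib_spinlock g_spin = NLIB_SPINLOCK_INITIALIZER;
//   nlib_spinlock_lock(&g_spin);
//   /* short critical section */
//   nlib_spinlock_unlock(&g_spin);
// On Darwin this maps to os_unfair_lock where <os/lock.h> is available, and to
// the deprecated OSSpinLock otherwise.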

#ifdef __cplusplus
}
#endif

#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
#else
// __GNUC__
# define NLIB_CXX11_NEW_CHARACTER_TYPES
# ifndef __cpp_unicode_characters
#  define __cpp_unicode_characters 200704L // N2249
# endif
# define NLIB_CXX11_NOEXCEPT
// # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
// #  error Sorry, glibc is old.
// # endif
#endif

#define NLIB_ONCE_HAS_NATIVE
#define NLIB_TIMESPEC_HAS_NATIVE
#define NLIB_IOVEC_HAS_NATIVE

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_rwlock_t nlib_rwlock;
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER

#define NLIB_PTHREAD_nlib_rwlock_init
#define NLIB_PTHREAD_nlib_rwlock_destroy
#define NLIB_PTHREAD_nlib_rwlock_tryrdlock
#define NLIB_PTHREAD_nlib_rwlock_trywrlock
#define NLIB_PTHREAD_nlib_rwlock_rdlock
#define NLIB_PTHREAD_nlib_rwlock_rdunlock
#define NLIB_PTHREAD_nlib_rwlock_wrlock
#define NLIB_PTHREAD_nlib_rwlock_wrunlock
#endif

#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
typedef pthread_barrier_t nlib_barrier;
#define NLIB_PTHREAD_nlib_barrier_init
#define NLIB_PTHREAD_nlib_barrier_destroy
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif

#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif

#ifdef __linux__
// NOTE:
// An implementation based on timerfd_create() and epoll() consumes file
// descriptors, and differences in the per-process descriptor limit could hurt
// portability. Its performance is also no better than our generic version.
// #define NLIB_TIMER_HAS_NATIVE
#endif

#define NLIB_LIBC_nlib_memcmp
#define NLIB_LIBC_nlib_strlen
#define NLIB_LIBC_nlib_strnlen
#if defined(__STDC_LIB_EXT1__)
# define NLIB_LIBC_nlib_wcslen
# define NLIB_LIBC_nlib_wcsnlen
# define NLIB_LIBC_nlib_strncpy
# define NLIB_LIBC_nlib_strcpy
# define NLIB_LIBC_nlib_wcsncpy
# define NLIB_LIBC_nlib_wcscpy
#endif
#define NLIB_LIBC_nlib_strchr
#define NLIB_LIBC_nlib_strrchr

#ifdef __cplusplus
extern "C" {
#endif

// gcc >= 4.7, or 64-bit clang (parenthesized so the version test binds to __GNUC__)
#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#if defined(NLIB_DOXYGEN)
int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                   int32_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);

int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                   int64_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);

void* nlib_atomic_loadptr(void* const* ptr, int memorder);
void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder);
int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                    void* desired, int weak,
                                    int success_memorder, int failure_memorder);
void nlib_atomic_thread_fence(int memorder);
#endif

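// Note: under ThreadSanitizer the wrappers below additionally take this global
// mutex around each __atomic builtin; presumably this keeps the operations
// visible to TSan as ordinary locked accesses. In normal builds the macros
// expand to nothing and the builtins are used directly.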
NLIB_VIS_PUBLIC extern pthread_mutex_t nlib_tsan_lock;
#if defined(__has_feature) && __has_feature(thread_sanitizer)
#define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
#define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
#else
#define NLIB_TSAN_LOCK
#define NLIB_TSAN_UNLOCK
#endif

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    __atomic_thread_fence(memorder);
}

#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7
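// Note: the values above compose as bit flags: NLIB_ATOMIC_ACQ_REL is
// (NLIB_ATOMIC_ACQUIRE | NLIB_ATOMIC_RELEASE) and NLIB_ATOMIC_SEQ_CST sets all
// bits, so tests such as (memorder & NLIB_ATOMIC_RELEASE) below also match
// ACQ_REL and SEQ_CST.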

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        __sync_synchronize();  // acquire fence; x86 loads already have acquire semantics
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence before the store below
    __sync_lock_test_and_set(ptr, val);
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence; the swap itself is an acquire barrier
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = *(volatile int64_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        __sync_synchronize();  // acquire fence; x86 loads already have acquire semantics
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence before the store below
    __sync_lock_test_and_set(ptr, val);
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence; the swap itself is an acquire barrier
    return __sync_lock_test_and_set(ptr, val);
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence; the swap itself is an acquire barrier
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile *)ptr; // NOLINT
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        __sync_synchronize();  // acquire fence; x86 loads already have acquire semantics
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        __sync_synchronize();  // release fence before the store below
    void* tmp = __sync_lock_test_and_set(ptr, val);
    (void)tmp;
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    // The __sync interface offers only a full barrier, so every non-relaxed
    // ordering emits __sync_synchronize().
    switch (memorder) {
    case NLIB_ATOMIC_RELAXED:
        break;
    case NLIB_ATOMIC_ACQUIRE:
        __sync_synchronize();
        break;
    case NLIB_ATOMIC_RELEASE:
        __sync_synchronize();
        break;
    case NLIB_ATOMIC_ACQ_REL:
        __sync_synchronize();
        break;
    default:
        __sync_synchronize();
        break;
    }
}
#endif
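
// Usage sketch (illustrative, not part of the original header): a CAS loop
// built on the wrappers above; 'example_inc_saturate' is hypothetical.
//   static __inline int32_t example_inc_saturate(int32_t* p, int32_t max) {
//       int32_t cur = nlib_atomic_load32(p, NLIB_ATOMIC_RELAXED);
//       while (cur < max &&
//              !nlib_atomic_compare_exchange32(p, &cur, cur + 1, 1,
//                                              NLIB_ATOMIC_ACQ_REL,
//                                              NLIB_ATOMIC_RELAXED)) {
//           // on failure the CAS wrote the current value back into 'cur'
//       }
//       return cur;
//   }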

#ifdef __cplusplus
}
#endif

#endif
#endif // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_