nlib
Platform_unix.h
1 
2 /*--------------------------------------------------------------------------------*
3  Project: CrossRoad
4  Copyright (C)Nintendo All rights reserved.
5 
6  These coded instructions, statements, and computer programs contain proprietary
7  information of Nintendo and/or its licensed developers and are protected by
8  national and international copyright laws. They may not be disclosed to third
9  parties or copied or duplicated in any form, in whole or in part, without the
10  prior written consent of Nintendo.
11 
12  The content herein is highly confidential and should be handled accordingly.
13  *--------------------------------------------------------------------------------*/
14 
15 #pragma once
16 #ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
17 #define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
18 #ifndef INCLUDE_NN_NLIB_PLATFORM_H_
19 #error Do not include this file directly; it is included by Platform.h
20 #endif
21 
22 #if defined(__linux__) || defined(__FreeBSD__) || defined(__CYGWIN__) || \
23  (defined(__APPLE__) && defined(__MACH__))
24 
25 #ifdef __cplusplus
26 // http://cpprefjp.github.io/reference/cstdint.html
27 // Older libc may require __STDC_LIMIT_MACROS and __STDC_CONSTANT_MACROS to be defined.
28 #ifndef __STDC_LIMIT_MACROS
29 #warning __STDC_LIMIT_MACROS is not defined; compilation may fail
30 #define __STDC_LIMIT_MACROS
31 #endif
32 #ifndef __STDC_CONSTANT_MACROS
33 #warning __STDC_CONSTANT_MACROS is not defined; compilation may fail
34 #define __STDC_CONSTANT_MACROS
35 #endif
36 #endif
37 
38 #if defined(__APPLE__) && defined(__MACH__)
39 #define _DARWIN_UNLIMITED_SELECT
40 #include <libkern/OSAtomic.h>
41 #include <errno.h>
42 #if __has_include(<os/lock.h>)
43 #include <os/lock.h>
44 #endif
45 #endif
46 
47 #ifdef __cplusplus
48 extern "C" {
49 #endif
50 
51 #ifndef NLIB_UNIX
52 #define NLIB_UNIX
53 #endif
54 
55 // For now, only gcc and clang are supported.
56 #if !defined(__GNUC__) && !defined(__clang__)
57 #error Unsupported compiler: only gcc and clang are supported for now.
58 #endif
59 
60 #if !defined(__FreeBSD__) && !defined(__APPLE__)
61 // checking __GNU_LIBRARY__, __GLIBC__, __GLIBC_MINOR__ to detect glibc
62 #include <features.h>
63 #endif
64 #include <pthread.h> // for PTHREAD_MUTEX_INITIALIZER, ....
65 #include <semaphore.h> // for sem_t
66 #include <sys/types.h> // for pthread_mutex_t, ....
67 #include <sys/uio.h> // struct iovec
68 #include <fcntl.h>
69 #include <sys/socket.h>
70 #include <netinet/tcp.h> // TCP_FASTOPEN
71 #include <poll.h>
72 #include <netinet/in.h>
73 #include <arpa/inet.h>
74 #include <netdb.h>
75 #include <stdint.h>
76 
77 #if defined(__FreeBSD__) || defined(__APPLE__)
78 #include <dispatch/dispatch.h>
79 #endif
80 
81 #if defined(__i386__) || defined(__x86_64__)
82 #include <x86intrin.h>
83 #endif
84 
85 #ifndef __CYGWIN__
86 #define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
87 #define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
88 #define NLIB_WEAKSYMBOL __attribute__((weak))
89 #else
90 #define NLIB_VIS_HIDDEN
91 #define NLIB_VIS_PUBLIC
92 #define NLIB_WEAKSYMBOL
93 #endif
94 
95 #define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
96 #define NLIB_NEVER_INLINE __attribute__((__noinline__))
97 #define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
98 #define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
99 #define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
100 #if defined(__cplusplus) && __has_cpp_attribute(nodiscard)
101 #define NLIB_CHECK_RESULT [[nodiscard]]
102 #else
103 #define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
104 #endif
105 #if defined(__cplusplus) && __has_cpp_attribute(noreturn)
106 #define NLIB_NORETURN [[noreturn]]
107 #else
108 #define NLIB_NORETURN __attribute__((noreturn))
109 #endif
110 #if defined(__cplusplus) && __has_cpp_attribute(fallthrough)
111 #define NLIB_FALLTHROUGH [[fallthrough]]
112 #else
113 #define NLIB_FALLTHROUGH /* fall through */
114 #endif
115 #define NLIB_NONNULL __attribute__((nonnull))
116 #define NLIB_NONNULL_1 __attribute__((nonnull(1)))
117 #define NLIB_NONNULL_2 __attribute__((nonnull(2)))
118 #define NLIB_NONNULL_3 __attribute__((nonnull(3)))
119 #define NLIB_NONNULL_4 __attribute__((nonnull(4)))
120 #define NLIB_NONNULL_5 __attribute__((nonnull(5)))
121 #define NLIB_NONNULL_ENABLED
122 #define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
123 #define NLIB_ATTRIBUTE_PURE __attribute__((pure))
124 #define NLIB_ATTRIBUTE_CONST __attribute__((const))
125 
126 #ifdef __clang__
127 #if __has_attribute(alloc_size)
128 #define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
129 #define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
130 #else
131 #define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
132 #define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
133 #endif
134 #if __has_attribute(alloc_align)
135 #define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
136 #else
137 #define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
138 #endif
139 #if __has_attribute(assume_aligned)
140 #define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
141 #else
142 #define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
143 #endif
144 #else
145 #define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
146 #define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
147 #define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
148 #define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
149 #endif
150 
151 #ifndef NLIB_DEPRECATED
152 #if defined(__cplusplus) && __has_cpp_attribute(deprecated)
153 #define NLIB_DEPRECATED [[deprecated]]
154 #else
155 #define NLIB_DEPRECATED __attribute__((deprecated))
156 #endif
157 #endif
158 #ifndef NLIB_DEPRECATED_MSG
159 #if defined(__cplusplus) && __has_cpp_attribute(deprecated)
160 #define NLIB_DEPRECATED_MSG(msg) [[deprecated(msg)]]
161 #else
162 #define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
163 #endif
164 #endif
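// Usage sketch (illustration only): how the annotation macros above are
// typically combined; the function and variables below are hypothetical.
//
//   NLIB_VIS_PUBLIC NLIB_CHECK_RESULT NLIB_NONNULL_1
//   int my_read_file(const char* path, void* buf, size_t size);
//
//   if (NLIB_UNLIKELY(buf == NULL)) return EINVAL;   // branch-prediction hint
//   if (NLIB_LIKELY(nread > 0)) { /* hot path */ }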
165 
166 #if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
167 #define NLIB_LITTLE_ENDIAN
168 #elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
169 #undef NLIB_LITTLE_ENDIAN
170 #else
171 #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
172 #define NLIB_LITTLE_ENDIAN
173 #else
174 #undef NLIB_LITTLE_ENDIAN
175 #endif
176 #endif
177 #if defined(__x86_64__) || defined(__aarch64__)
178 #define NLIB_64BIT
179 #endif
180 
181 #define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
182 #define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
183 #define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
184 #define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
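// Usage sketch (illustration only): pairing the standalone fences above with
// relaxed atomics (defined later in this header). g_data, g_ready, and use()
// are hypothetical.
//
//   g_data = 42;                                            // plain write
//   NLIB_MEMORY_ORDER_RELEASE;                              // publish
//   nlib_atomic_store32(&g_ready, 1, NLIB_ATOMIC_RELAXED);
//
//   while (!nlib_atomic_load32(&g_ready, NLIB_ATOMIC_RELAXED)) {}
//   NLIB_MEMORY_ORDER_ACQUIRE;                              // now g_data is visible
//   use(g_data);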
185 
186 typedef pthread_key_t nlib_tls;
187 #define NLIB_PTHREAD_nlib_tls_alloc
188 #define NLIB_PTHREAD_nlib_tls_free
189 #define NLIB_PTHREAD_nlib_tls_setvalue
190 #define NLIB_PTHREAD_nlib_tls_getvalue
191 
192 #ifndef _LIBCPP_VERSION
193 NLIB_CAPABILITY("mutex")
194 #endif
195 typedef pthread_mutex_t nlib_mutex;
196 
197 #ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
198 #ifdef __FreeBSD__
199 // https://stackoverflow.com/questions/10369606/constexpr-pointer-value
200 // PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP is ((pthread_mutex_t)1) on FreeBSD
201 #define NLIB_MUTEX_INITIALIZER \
202  (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) \
203  ? PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP \
204  : PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
205 #else
206 #define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
207 #endif
208 #else
209 #define NLIB_PTHREAD_nlib_mutex_init
210 #define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
211 #endif
212 
213 #ifndef __FreeBSD__
214 #define NLIB_PTHREAD_nlib_mutex_lock
215 #define NLIB_PTHREAD_nlib_mutex_unlock
216 #define NLIB_PTHREAD_nlib_mutex_trylock
217 #define NLIB_PTHREAD_nlib_mutex_destroy
218 #endif
219 
220 #if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
221 #define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
222 #define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
223 #elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
224 #define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
225 #define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
226 #elif defined(__FreeBSD__)
227 // FreeBSD does not support a static initializer for recursive mutexes;
228 // nlib initializes the recursive mutex lazily when its value is 255.
229 #define NLIB_RECURSIVE_MUTEX_INITIALIZER \
230  (__builtin_constant_p((pthread_mutex_t)255) ? (pthread_mutex_t)255 : (pthread_mutex_t)255)
231 #define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER \
232  (__builtin_constant_p((pthread_mutex_t)255) ? (pthread_mutex_t)255 : (pthread_mutex_t)255)
233 #elif defined(NLIB_ALPINE)
234 // Hack: relies on the internal pthread_mutex_t layout of musl libc (Alpine) to mark the mutex recursive.
235 #define NLIB_RECURSIVE_MUTEX_INITIALIZER \
236  { \
237  { \
238  { 1 } \
239  } \
240  }
241 #define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER \
242  { \
243  { \
244  { 1 } \
245  } \
246  }
247 #else
248 #error Sorry, no static initializer for recursive mutexes is available on this platform.
249 #endif
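// Usage sketch (illustration only): the initializers above allow mutexes to be
// defined statically. nlib_mutex_lock()/nlib_mutex_unlock() are the nlib wrappers
// declared elsewhere; g_lock and g_reclock are hypothetical variables.
//
//   static nlib_mutex g_lock = NLIB_MUTEX_INITIALIZER;
//   static nlib_mutex g_reclock = NLIB_RECURSIVE_MUTEX_INITIALIZER;
//
//   nlib_mutex_lock(&g_lock);
//   /* critical section */
//   nlib_mutex_unlock(&g_lock);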
250 
251 #if defined(__APPLE__)
252 typedef int nlib_semaphore;
253 #else
254 typedef sem_t nlib_semaphore;
255 #endif
256 
257 typedef pthread_cond_t nlib_cond;
258 #define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER
259 
260 #define NLIB_PTHREAD_nlib_cond_init
261 #define NLIB_PTHREAD_nlib_cond_signal
262 #define NLIB_PTHREAD_nlib_cond_broadcast
263 #define NLIB_PTHREAD_nlib_cond_wait
264 #define NLIB_PTHREAD_nlib_cond_destroy
265 
266 typedef pthread_t nlib_thread;
267 
268 #define NLIB_PTHREAD_nlib_thread_join
269 #define NLIB_PTHREAD_nlib_thread_detach
270 #define NLIB_PTHREAD_nlib_thread_equal
271 #define NLIB_PTHREAD_nlib_thread_self
272 
273 #if defined(__APPLE__)
274 #define NLIB_SPINLOCK_HAS_NATIVE
275 #if __has_include(<os/lock.h>)
276 typedef os_unfair_lock nlib_spinlock;
277 #define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
278 static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
279  *lock = OS_UNFAIR_LOCK_INIT;
280 }
281 static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
282  os_unfair_lock_lock(lock);
283 }
284 static NLIB_ALWAYS_INLINE errno_t nlib_spinlock_trylock(nlib_spinlock* lock) {
285  return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
286 }
287 static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
288  os_unfair_lock_unlock(lock);
289 }
290 #else
291 typedef OSSpinLock nlib_spinlock;
292 #define NLIB_SPINLOCK_INITIALIZER (0)
293 static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
294  *lock = 0;
295 }
296 static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
297  OSSpinLockLock(lock);
298 }
299 static NLIB_ALWAYS_INLINE errno_t nlib_spinlock_trylock(nlib_spinlock* lock) {
300  return OSSpinLockTry(lock) ? 0 : EBUSY;
301 }
302 static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
303  OSSpinLockUnlock(lock);
304 }
305 #endif
306 #endif
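// Usage sketch (illustration only): the macOS-native spinlock wrappers above
// (on other platforms the generic versions in Platform.h are used). g_spin is
// a hypothetical variable.
//
//   static nlib_spinlock g_spin = NLIB_SPINLOCK_INITIALIZER;
//
//   nlib_spinlock_lock(&g_spin);          // do not lock recursively
//   /* short critical section */
//   nlib_spinlock_unlock(&g_spin);
//
//   if (nlib_spinlock_trylock(&g_spin) == 0) {  // 0 on success, EBUSY if held
//     nlib_spinlock_unlock(&g_spin);
//   }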
307 
308 #ifdef __cplusplus
309 }
310 #endif
311 
312 #if defined(__clang__)
313 #if __has_feature(cxx_unicode_literals)
314 #ifndef __cpp_unicode_characters
315 #define __cpp_unicode_characters 200704L // N2249
316 #endif
317 #endif
318 #if __has_feature(cxx_exceptions)
319 #if __has_feature(cxx_noexcept)
320 #define NLIB_CXX11_NOEXCEPT
321 #endif
322 #else
323 #define NLIB_NOEXCEPT
324 #endif
325 #else
326 // __GNUC__
327 #ifndef __cpp_unicode_characters
328 #define __cpp_unicode_characters 200704L // N2249
329 #endif
330 #define NLIB_CXX11_NOEXCEPT
331 // # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
332 // # error Sorry, glibc is old.
333 // # endif
334 #endif
335 
336 #define NLIB_ONCE_HAS_NATIVE
337 #define NLIB_TIMESPEC_HAS_NATIVE
338 #define NLIB_IOVEC_HAS_NATIVE
339 
340 #ifdef PTHREAD_RWLOCK_INITIALIZER
341 #define NLIB_RWLOCK_HAS_NATIVE
342 #endif
343 #ifdef NLIB_RWLOCK_HAS_NATIVE
344 #ifndef _LIBCPP_VERSION
345 NLIB_CAPABILITY("mutex")
346 #endif
347 typedef pthread_rwlock_t nlib_rwlock;
348 #define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER
349 
350 #define NLIB_PTHREAD_nlib_rwlock_init
351 #define NLIB_PTHREAD_nlib_rwlock_destroy
352 #define NLIB_PTHREAD_nlib_rwlock_tryrdlock
353 #define NLIB_PTHREAD_nlib_rwlock_trywrlock
354 #define NLIB_PTHREAD_nlib_rwlock_rdlock
355 #define NLIB_PTHREAD_nlib_rwlock_rdunlock
356 #define NLIB_PTHREAD_nlib_rwlock_wrlock
357 #define NLIB_PTHREAD_nlib_rwlock_wrunlock
358 #endif
359 
360 #ifdef PTHREAD_BARRIER_SERIAL_THREAD
361 #define NLIB_BARRIER_HAS_NATIVE
362 #endif
363 #ifdef NLIB_BARRIER_HAS_NATIVE
364 typedef pthread_barrier_t nlib_barrier;
365 #define NLIB_PTHREAD_nlib_barrier_init
366 #define NLIB_PTHREAD_nlib_barrier_destroy
367 #endif
368 
369 #define NLIB_THREAD_ATTR_HAS_NATIVE
370 
371 #ifndef pthread_cleanup_push
372 #error pthread_cleanup_push must be a macro
373 #endif
374 
375 #ifndef pthread_cleanup_pop
376 #error pthread_cleanup_pop must be a macro
377 #endif
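// Usage sketch (illustration only): nlib relies on pthread_cleanup_push()/pop()
// expanding to a macro pair that opens and closes a block, as in the standard
// pattern below; unlock_routine and g_lock are hypothetical.
//
//   pthread_cleanup_push(unlock_routine, &g_lock);
//   /* code that may contain thread cancellation points */
//   pthread_cleanup_pop(1);   // 1: also run the cleanup handler here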
378 
379 #ifdef __linux__
380 // NOTE:
381 // An implementation based on timerfd_create() and epoll() consumes file descriptors,
382 // and differences in the maximum number of file descriptors across systems might limit portability.
383 // Its performance is also no better than our generic version.
384 // #define NLIB_TIMER_HAS_NATIVE
385 #endif
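// Sketch of the native approach mentioned above (illustration only, requires
// <sys/timerfd.h> and <sys/epoll.h>): every timer needs its own descriptor,
// which is why this approach consumes file descriptors.
//
//   int tfd = timerfd_create(CLOCK_MONOTONIC, TFD_NONBLOCK);  // one fd per timer
//   struct itimerspec its = { {0, 0}, {1, 0} };                // fire once after 1 sec
//   timerfd_settime(tfd, 0, &its, NULL);
//   /* register tfd with an epoll instance and read(tfd, ...) on expiration */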
386 
387 #define NLIB_LIBC_nlib_memcmp
388 #define NLIB_LIBC_nlib_strlen
389 #define NLIB_LIBC_nlib_strnlen
390 #if defined(__STDC_LIB_EXT1__)
391 #define NLIB_LIBC_nlib_wcslen
392 #define NLIB_LIBC_nlib_wcsnlen
393 #define NLIB_LIBC_nlib_strncpy
394 #define NLIB_LIBC_nlib_strcpy
395 #define NLIB_LIBC_nlib_wcsncpy
396 #define NLIB_LIBC_nlib_wcscpy
397 #endif
398 #define NLIB_LIBC_nlib_strchr
399 #define NLIB_LIBC_nlib_strrchr
400 
401 #ifdef __cplusplus
402 extern "C" {
403 #endif
404 
405 #define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
406 #define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
407 #define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
408 #define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
409 #define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST
410 
411 #if defined(NLIB_DOXYGEN)
412 int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
413 void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
414 int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
415 int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected, int32_t desired, int weak,
416  int success_memorder, int failure_memorder);
417 int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
418 int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
419 int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
420 int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
421 int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
422 int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
423 int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
424 int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
425 int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
426 int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);
427 
428 int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
429 void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
430 int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
431 int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected, int64_t desired, int weak,
432  int success_memorder, int failure_memorder);
433 int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
434 int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
435 int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
436 int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
437 int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
438 int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
439 int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
440 int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
441 int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
442 int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);
443 
444 void* nlib_atomic_loadptr(void* const* ptr, int memorder);
445 void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
446 void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder);
447 int nlib_atomic_compare_exchangeptr(void** ptr, void** expected, void* desired, int weak,
448  int success_memorder, int failure_memorder);
449 void nlib_atomic_thread_fence(int memorder);
450 #endif
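// Usage sketch (illustration only): a compare-and-swap loop built on the
// wrappers defined below; g_count and kLimit are hypothetical.
//
//   int32_t old = nlib_atomic_load32(&g_count, NLIB_ATOMIC_RELAXED);
//   do {
//     if (old >= kLimit) break;            // give up once the limit is reached
//   } while (!nlib_atomic_compare_exchange32(&g_count, &old, old + 1, 1,
//                                            NLIB_ATOMIC_ACQ_REL,
//                                            NLIB_ATOMIC_RELAXED));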
451 
452 NLIB_VIS_PUBLIC extern pthread_mutex_t nlib_tsan_lock;
453 #if defined(__has_feature) && __has_feature(thread_sanitizer)
454 #define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
455 #define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
456 #else
457 #define NLIB_TSAN_LOCK
458 #define NLIB_TSAN_UNLOCK
459 #endif
460 
461 static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
462  int32_t rval;
463  NLIB_TSAN_LOCK
464  rval = __atomic_load_n(ptr, memorder);
465  NLIB_TSAN_UNLOCK
466  return rval;
467 }
468 
469 static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
470  NLIB_TSAN_LOCK
471  __atomic_store_n(ptr, val, memorder);
472  NLIB_TSAN_UNLOCK
473 }
474 
475 static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder) {
476  int32_t rval;
477  NLIB_TSAN_LOCK
478  rval = __atomic_exchange_n(ptr, val, memorder);
479  NLIB_TSAN_UNLOCK
480  return rval;
481 }
482 
483 static __inline int
484 nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected, int32_t desired, int weak,
485  int success_memorder, int failure_memorder) {
486  int rval;
487  NLIB_TSAN_LOCK
488  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak, success_memorder,
489  failure_memorder);
490  NLIB_TSAN_UNLOCK
491  return rval;
492 }
493 
494 static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder) {
495  int32_t rval;
496  NLIB_TSAN_LOCK
497  rval = __atomic_add_fetch(ptr, val, memorder);
498  NLIB_TSAN_UNLOCK
499  return rval;
500 }
501 
502 static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder) {
503  int32_t rval;
504  NLIB_TSAN_LOCK
505  rval = __atomic_sub_fetch(ptr, val, memorder);
506  NLIB_TSAN_UNLOCK
507  return rval;
508 }
509 
510 static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder) {
511  int32_t rval;
512  NLIB_TSAN_LOCK
513  rval = __atomic_and_fetch(ptr, val, memorder);
514  NLIB_TSAN_UNLOCK
515  return rval;
516 }
517 
518 static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder) {
519  int32_t rval;
520  NLIB_TSAN_LOCK
521  rval = __atomic_xor_fetch(ptr, val, memorder);
522  NLIB_TSAN_UNLOCK
523  return rval;
524 }
525 
526 static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder) {
527  int32_t rval;
528  NLIB_TSAN_LOCK
529  rval = __atomic_or_fetch(ptr, val, memorder);
530  NLIB_TSAN_UNLOCK
531  return rval;
532 }
533 
534 static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder) {
535  int32_t rval;
536  NLIB_TSAN_LOCK
537  rval = __atomic_fetch_add(ptr, val, memorder);
538  NLIB_TSAN_UNLOCK
539  return rval;
540 }
541 
542 static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder) {
543  int32_t rval;
544  NLIB_TSAN_LOCK
545  rval = __atomic_fetch_sub(ptr, val, memorder);
546  NLIB_TSAN_UNLOCK
547  return rval;
548 }
549 
550 static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder) {
551  int32_t rval;
552  NLIB_TSAN_LOCK
553  rval = __atomic_fetch_and(ptr, val, memorder);
554  NLIB_TSAN_UNLOCK
555  return rval;
556 }
557 
558 static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder) {
559  int32_t rval;
560  NLIB_TSAN_LOCK
561  rval = __atomic_fetch_xor(ptr, val, memorder);
562  NLIB_TSAN_UNLOCK
563  return rval;
564 }
565 
566 static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder) {
567  int32_t rval;
568  NLIB_TSAN_LOCK
569  rval = __atomic_fetch_or(ptr, val, memorder);
570  NLIB_TSAN_UNLOCK
571  return rval;
572 }
573 
574 static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
575  int64_t rval;
576  NLIB_TSAN_LOCK
577  rval = __atomic_load_n(ptr, memorder);
578  NLIB_TSAN_UNLOCK
579  return rval;
580 }
581 
582 static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
583  NLIB_TSAN_LOCK
584  __atomic_store_n(ptr, val, memorder);
585  NLIB_TSAN_UNLOCK
586 }
587 
588 static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder) {
589  int64_t rval;
590  NLIB_TSAN_LOCK
591  rval = __atomic_exchange_n(ptr, val, memorder);
592  NLIB_TSAN_UNLOCK
593  return rval;
594 }
595 
596 static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
597  void* rval;
598  NLIB_TSAN_LOCK
599  rval = __atomic_exchange_n(ptr, val, memorder);
600  NLIB_TSAN_UNLOCK
601  return rval;
602 }
603 
604 static __inline int
605 nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected, int64_t desired, int weak,
606  int success_memorder, int failure_memorder) {
607  int rval;
608  NLIB_TSAN_LOCK
609  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak, success_memorder,
610  failure_memorder);
611  NLIB_TSAN_UNLOCK
612  return rval;
613 }
614 
615 static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder) {
616  int64_t rval;
617  NLIB_TSAN_LOCK
618  rval = __atomic_add_fetch(ptr, val, memorder);
619  NLIB_TSAN_UNLOCK
620  return rval;
621 }
622 
623 static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder) {
624  int64_t rval;
625  NLIB_TSAN_LOCK
626  rval = __atomic_sub_fetch(ptr, val, memorder);
627  NLIB_TSAN_UNLOCK
628  return rval;
629 }
630 
631 static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder) {
632  int64_t rval;
633  NLIB_TSAN_LOCK
634  rval = __atomic_and_fetch(ptr, val, memorder);
635  NLIB_TSAN_UNLOCK
636  return rval;
637 }
638 
639 static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder) {
640  int64_t rval;
641  NLIB_TSAN_LOCK
642  rval = __atomic_xor_fetch(ptr, val, memorder);
643  NLIB_TSAN_UNLOCK
644  return rval;
645 }
646 
647 static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder) {
648  int64_t rval;
649  NLIB_TSAN_LOCK
650  rval = __atomic_or_fetch(ptr, val, memorder);
651  NLIB_TSAN_UNLOCK
652  return rval;
653 }
654 
655 static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder) {
656  int64_t rval;
657  NLIB_TSAN_LOCK
658  rval = __atomic_fetch_add(ptr, val, memorder);
659  NLIB_TSAN_UNLOCK
660  return rval;
661 }
662 
663 static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder) {
664  int64_t rval;
665  NLIB_TSAN_LOCK
666  rval = __atomic_fetch_sub(ptr, val, memorder);
667  NLIB_TSAN_UNLOCK
668  return rval;
669 }
670 
671 static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder) {
672  int64_t rval;
673  NLIB_TSAN_LOCK
674  rval = __atomic_fetch_and(ptr, val, memorder);
675  NLIB_TSAN_UNLOCK
676  return rval;
677 }
678 
679 static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder) {
680  int64_t rval;
681  NLIB_TSAN_LOCK
682  rval = __atomic_fetch_xor(ptr, val, memorder);
683  NLIB_TSAN_UNLOCK
684  return rval;
685 }
686 
687 static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder) {
688  int64_t rval;
689  NLIB_TSAN_LOCK
690  rval = __atomic_fetch_or(ptr, val, memorder);
691  NLIB_TSAN_UNLOCK
692  return rval;
693 }
694 
695 static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
696  void* rval;
697  NLIB_TSAN_LOCK
698  rval = __atomic_load_n(ptr, memorder);
699  NLIB_TSAN_UNLOCK
700  return rval;
701 }
702 
703 static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
704  NLIB_TSAN_LOCK
705  __atomic_store_n(ptr, val, memorder);
706  NLIB_TSAN_UNLOCK
707 }
708 
709 static __inline int
710 nlib_atomic_compare_exchangeptr(void** ptr, void** expected, void* desired, int weak,
711  int success_memorder, int failure_memorder) {
712  int rval;
713  NLIB_TSAN_LOCK
714  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak, success_memorder,
715  failure_memorder);
716  NLIB_TSAN_UNLOCK
717  return rval;
718 }
719 
720 static __inline void nlib_atomic_thread_fence(int memorder) {
721  __atomic_thread_fence(memorder);
722 }
723 
724 #ifdef __cplusplus
725 }
726 #endif
727 
728 #endif
729 #endif // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_