#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_

#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#ifndef __STDC_LIMIT_MACROS
#warning __STDC_LIMIT_MACROS not defined, compile may fail
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#warning __STDC_CONSTANT_MACROS not defined, compile may fail
#define __STDC_CONSTANT_MACROS
#endif

#if defined(__APPLE__) && defined(__MACH__)
#define _DARWIN_UNLIMITED_SELECT
#include <libkern/OSAtomic.h>
#if __has_include( <os/lock.h> )
#include <os/lock.h>
#endif
#endif

#if !defined(__GNUC__) && !defined(__clang__)
# error unsupported compiler, gcc or clang is required
#endif

#if !defined(__FreeBSD__) && !defined(__APPLE__)
#include <malloc.h> /* not provided on FreeBSD/Darwin */
#endif

#include <stdint.h>
#include <errno.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/tcp.h>
#include <netinet/in.h>
#include <arpa/inet.h>

#if defined(__FreeBSD__) || defined(__APPLE__)
#include <dispatch/dispatch.h>
#endif

#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif

#if !defined(__CYGWIN__)
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))

#if defined(__clang__)
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif

#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif

#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif

#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#  error gcc 4.6 or below is supported only on x86
# endif
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#define NLIB_MEMORY_ORDER_SEQ_CST __sync_synchronize()
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
#endif

typedef pthread_key_t nlib_tls;
#define NLIB_PTHREAD_nlib_tls_alloc
#define NLIB_PTHREAD_nlib_tls_free
#define NLIB_PTHREAD_nlib_tls_setvalue
#define NLIB_PTHREAD_nlib_tls_getvalue

#ifndef _LIBCPP_VERSION
typedef pthread_mutex_t nlib_mutex NLIB_CAPABILITY("mutex");
#else
typedef pthread_mutex_t nlib_mutex;
#endif
#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
/* The __builtin_constant_p() ternary evaluates to the same initializer but
   keeps it a constant expression under stricter diagnostics. */
# if defined(__clang__)
#  define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
#  define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_PTHREAD_nlib_mutex_init
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif

#define NLIB_PTHREAD_nlib_mutex_lock
#define NLIB_PTHREAD_nlib_mutex_unlock
#define NLIB_PTHREAD_nlib_mutex_trylock
#define NLIB_PTHREAD_nlib_mutex_destroy

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
/* FreeBSD has no public static initializer for recursive mutexes; the tagged
   pointer value 255 is initialized lazily by libthr on first use. */
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#elif defined(NLIB_ALPINE)
/* musl: the leading field of pthread_mutex_t selects the mutex type
   (1 == recursive). */
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
#endif

#if defined(__APPLE__)
/* Darwin has no pthread_condattr_setclock(); condition waits use the
   default clock. */
#endif
typedef pthread_cond_t nlib_cond;
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER

#define NLIB_PTHREAD_nlib_cond_init
#define NLIB_PTHREAD_nlib_cond_signal
#define NLIB_PTHREAD_nlib_cond_broadcast
#define NLIB_PTHREAD_nlib_cond_wait
#define NLIB_PTHREAD_nlib_cond_destroy

typedef pthread_t nlib_thread;
#define NLIB_PTHREAD_nlib_thread_join
#define NLIB_PTHREAD_nlib_thread_detach
#define NLIB_PTHREAD_nlib_thread_equal
#define NLIB_PTHREAD_nlib_thread_self

#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#if __has_include( <os/lock.h> )
typedef os_unfair_lock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
static inline void nlib_spinlock_init(nlib_spinlock* lock) {
  *lock = OS_UNFAIR_LOCK_INIT;
}
static inline void nlib_spinlock_lock(nlib_spinlock* lock) {
  os_unfair_lock_lock(lock);
}
static inline int nlib_spinlock_trylock(nlib_spinlock* lock) {
  return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
}
static inline void nlib_spinlock_unlock(nlib_spinlock* lock) {
  os_unfair_lock_unlock(lock);
}
#else
typedef OSSpinLock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER (0)
static inline void nlib_spinlock_init(nlib_spinlock* lock) {
  *lock = 0;
}
static inline void nlib_spinlock_lock(nlib_spinlock* lock) {
  OSSpinLockLock(lock);
}
static inline int nlib_spinlock_trylock(nlib_spinlock* lock) {
  return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static inline void nlib_spinlock_unlock(nlib_spinlock* lock) {
  OSSpinLockUnlock(lock);
}
#endif
#endif
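/*
 * Usage sketch (illustrative only): a statically initialized spinlock guarding
 * a counter. The nlib_spinlock_* names above are the Apple-native path marked
 * by NLIB_SPINLOCK_HAS_NATIVE; on other platforms the same API is presumably
 * supplied by a portable fallback outside this header.
 *
 *   static nlib_spinlock g_lock = NLIB_SPINLOCK_INITIALIZER;
 *   static int g_counter;
 *
 *   static void bump(void) {
 *     nlib_spinlock_lock(&g_lock);
 *     ++g_counter;
 *     nlib_spinlock_unlock(&g_lock);
 *   }
 */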
#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
#elif defined(__GNUC__)
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
#  define NLIB_CXX11_NOEXCEPT
# endif
#endif

#define NLIB_ONCE_HAS_NATIVE
#define NLIB_TIMESPEC_HAS_NATIVE
#define NLIB_IOVEC_HAS_NATIVE

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
#ifndef _LIBCPP_VERSION
typedef pthread_rwlock_t nlib_rwlock NLIB_CAPABILITY("mutex");
#else
typedef pthread_rwlock_t nlib_rwlock;
#endif
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER

#define NLIB_PTHREAD_nlib_rwlock_init
#define NLIB_PTHREAD_nlib_rwlock_destroy
#define NLIB_PTHREAD_nlib_rwlock_tryrdlock
#define NLIB_PTHREAD_nlib_rwlock_trywrlock
#define NLIB_PTHREAD_nlib_rwlock_rdlock
#define NLIB_PTHREAD_nlib_rwlock_rdunlock
#define NLIB_PTHREAD_nlib_rwlock_wrlock
#define NLIB_PTHREAD_nlib_rwlock_wrunlock
#endif

#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
typedef pthread_barrier_t nlib_barrier;
#define NLIB_PTHREAD_nlib_barrier_init
#define NLIB_PTHREAD_nlib_barrier_destroy
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif
#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif

#define NLIB_LIBC_nlib_memcmp
#define NLIB_LIBC_nlib_strlen
#define NLIB_LIBC_nlib_strnlen
#if defined(__STDC_LIB_EXT1__)
# define NLIB_LIBC_nlib_wcslen
# define NLIB_LIBC_nlib_wcsnlen
# define NLIB_LIBC_nlib_strncpy
# define NLIB_LIBC_nlib_strcpy
# define NLIB_LIBC_nlib_wcsncpy
# define NLIB_LIBC_nlib_wcscpy
#endif
#define NLIB_LIBC_nlib_strchr
#define NLIB_LIBC_nlib_strrchr

#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#if defined(NLIB_DOXYGEN)
/* Documentation-only prototypes; the compiler sees the static inline
   definitions below. The rest of the nlib_atomic_* family (load/store/
   exchange and the fetch-op variants for int32_t, int64_t and void*) is
   declared here in the same style. */
extern int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                          int32_t desired, int weak,
                                          int success_memorder,
                                          int failure_memorder);
extern int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                          int64_t desired, int weak,
                                          int success_memorder,
                                          int failure_memorder);
extern int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                           void* desired, int weak,
                                           int success_memorder,
                                           int failure_memorder);
#else
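/*
 * Usage sketch (illustrative only): the compare-exchange wrappers follow the
 * __atomic_compare_exchange_n() contract — on failure they return 0 and write
 * the value actually observed into *expected, so a retry loop needs no
 * separate re-read.
 *
 *   static int32_t g_value;
 *
 *   static void add_one(void) {
 *     int32_t expected = nlib_atomic_load32(&g_value, NLIB_ATOMIC_RELAXED);
 *     while (!nlib_atomic_compare_exchange32(&g_value, &expected, expected + 1,
 *                                            1,  // weak: spurious failure is fine in a loop
 *                                            NLIB_ATOMIC_ACQ_REL,
 *                                            NLIB_ATOMIC_RELAXED)) {
 *       // expected now holds the current value; just retry
 *     }
 *   }
 */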
#if defined(__has_feature) && __has_feature(thread_sanitizer)
extern pthread_mutex_t nlib_tsan_lock;
#define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
#define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
#else
#define NLIB_TSAN_LOCK
#define NLIB_TSAN_UNLOCK
#endif

static inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_load_n(ptr, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
  NLIB_TSAN_LOCK
  __atomic_store_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
}

static inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_exchange_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                 int32_t desired, int weak,
                                                 int success_memorder,
                                                 int failure_memorder) {
  int rval;
  NLIB_TSAN_LOCK
  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                     success_memorder, failure_memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_add_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_sub_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_and_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_xor_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_or_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_add(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_sub(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_and(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_xor(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder) {
  int32_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_or(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_load_n(ptr, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
  NLIB_TSAN_LOCK
  __atomic_store_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
}

static inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_exchange_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
  void* rval;
  NLIB_TSAN_LOCK
  rval = __atomic_exchange_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                 int64_t desired, int weak,
                                                 int success_memorder,
                                                 int failure_memorder) {
  int rval;
  NLIB_TSAN_LOCK
  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                     success_memorder, failure_memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_add_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_sub_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_and_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_xor_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_or_fetch(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_add(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_sub(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_and(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_xor(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder) {
  int64_t rval;
  NLIB_TSAN_LOCK
  rval = __atomic_fetch_or(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
  void* rval;
  NLIB_TSAN_LOCK
  rval = __atomic_load_n(ptr, memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
  NLIB_TSAN_LOCK
  __atomic_store_n(ptr, val, memorder);
  NLIB_TSAN_UNLOCK
}

static inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                  void* desired, int weak,
                                                  int success_memorder,
                                                  int failure_memorder) {
  int rval;
  NLIB_TSAN_LOCK
  rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                     success_memorder, failure_memorder);
  NLIB_TSAN_UNLOCK
  return rval;
}

static inline void nlib_atomic_thread_fence(int memorder) {
  __atomic_thread_fence(memorder);
}

#endif  // NLIB_DOXYGEN
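/*
 * Usage sketch (illustrative only): release/acquire pairing with the
 * load/store wrappers above.
 *
 *   static int32_t g_ready;
 *   static int g_payload;
 *
 *   static void producer(void) {
 *     g_payload = 42;
 *     nlib_atomic_store32(&g_ready, 1, NLIB_ATOMIC_RELEASE);
 *   }
 *
 *   static int consumer(void) {
 *     while (nlib_atomic_load32(&g_ready, NLIB_ATOMIC_ACQUIRE) == 0) {}
 *     return g_payload;  // guaranteed to read 42
 *   }
 */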
#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7

static inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
  int32_t rval = *(const volatile int32_t*)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
  __sync_synchronize();
#endif
  (void)memorder;
  return rval;
}

static inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  __sync_lock_test_and_set(ptr, val);
}

static inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  return __sync_lock_test_and_set(ptr, val);
}

static inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                 int32_t desired, int weak,
                                                 int success_memorder,
                                                 int failure_memorder) {
  int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
  if (old == *expected)
    return 1;
  *expected = old;
  (void)weak;
  (void)success_memorder;
  (void)failure_memorder;
  return 0;
}

static inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_add_and_fetch(ptr, val);
}

static inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_sub_and_fetch(ptr, val);
}

static inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_and_and_fetch(ptr, val);
}

static inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_xor_and_fetch(ptr, val);
}

static inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_or_and_fetch(ptr, val);
}

static inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_add(ptr, val);
}

static inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_sub(ptr, val);
}

static inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_and(ptr, val);
}

static inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_xor(ptr, val);
}

static inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_or(ptr, val);
}
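/*
 * Note on this fallback path: __sync_lock_test_and_set() is an acquire
 * barrier only, which is why the store/exchange wrappers above issue a full
 * __sync_synchronize() first. The memorder argument is accepted purely for
 * API compatibility, so every operation is effectively sequentially
 * consistent:
 *
 *   __sync_synchronize();                      // full fence (release half)
 *   old = __sync_lock_test_and_set(ptr, val);  // atomic swap + acquire
 */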
static inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
  int64_t rval = *(const volatile int64_t*)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
  __sync_synchronize();
#endif
  (void)memorder;
  return rval;
}

static inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  __sync_lock_test_and_set(ptr, val);
}

static inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  return __sync_lock_test_and_set(ptr, val);
}

static inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  return __sync_lock_test_and_set(ptr, val);
}

static inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                 int64_t desired, int weak,
                                                 int success_memorder,
                                                 int failure_memorder) {
  int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
  if (old == *expected)
    return 1;
  *expected = old;
  (void)weak;
  (void)success_memorder;
  (void)failure_memorder;
  return 0;
}

static inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_add_and_fetch(ptr, val);
}

static inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_sub_and_fetch(ptr, val);
}

static inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_and_and_fetch(ptr, val);
}

static inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_xor_and_fetch(ptr, val);
}

static inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_or_and_fetch(ptr, val);
}

static inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_add(ptr, val);
}

static inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_sub(ptr, val);
}

static inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_and(ptr, val);
}

static inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_xor(ptr, val);
}

static inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder) {
  (void)memorder;
  return __sync_fetch_and_or(ptr, val);
}
static inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
  void* rval = *(void* const volatile *)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
  __sync_synchronize();
#endif
  (void)memorder;
  return rval;
}

static inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
  (void)memorder;
  __sync_synchronize();
  void* tmp = __sync_lock_test_and_set(ptr, val);
  (void)tmp;
}

static inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                  void* desired, int weak,
                                                  int success_memorder,
                                                  int failure_memorder) {
  void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
  if (old == *expected)
    return 1;
  *expected = old;
  (void)weak;
  (void)success_memorder;
  (void)failure_memorder;
  return 0;
}

static inline void nlib_atomic_thread_fence(int memorder) {
  (void)memorder;
  __sync_synchronize();
}

#endif

#endif
#endif  // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_