#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_

#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#ifndef __STDC_LIMIT_MACROS
#warning __STDC_LIMIT_MACROS not defined, compile may fail
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#warning __STDC_CONSTANT_MACROS not defined, compile may fail
#define __STDC_CONSTANT_MACROS
#endif

#if defined(__APPLE__) && defined(__MACH__)
#define _DARWIN_UNLIMITED_SELECT
#include <libkern/OSAtomic.h>
#if __has_include( <os/lock.h> )
#include <os/lock.h>
#endif
#endif

#if !defined(__GNUC__) && !defined(__clang__)
# error gcc or clang is required
#endif
#define NLIB_HAS_STDHEADER_INTTYPES

#if !defined(__FreeBSD__) && !defined(__APPLE__)
#endif

#include <stdint.h>
#include <errno.h>
#include <pthread.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/tcp.h>
#include <netinet/in.h>
#include <arpa/inet.h>

#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif

#ifdef __GNUC__
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))

#if defined(__clang__)
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif

#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif

#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif

#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#  error unsupported target for gcc 4.6 or below
# endif
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#define NLIB_MEMORY_ORDER_SEQ_CST __sync_synchronize()
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
#endif

#define NLIB_PTHREAD_nlib_tls_alloc
#define NLIB_PTHREAD_nlib_tls_free
#define NLIB_PTHREAD_nlib_tls_setvalue
#define NLIB_PTHREAD_nlib_tls_getvalue

#ifndef _LIBCPP_VERSION
typedef pthread_mutex_t nlib_mutex NLIB_CAPABILITY("mutex");
#else
typedef pthread_mutex_t nlib_mutex;
#endif
#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# ifdef __cplusplus
#  define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
#  define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_PTHREAD_nlib_mutex_init
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif
#define NLIB_PTHREAD_nlib_mutex_lock
#define NLIB_PTHREAD_nlib_mutex_unlock
#define NLIB_PTHREAD_nlib_mutex_trylock
#define NLIB_PTHREAD_nlib_mutex_destroy

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#elif defined(NLIB_ALPINE)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
#endif

#if defined(__APPLE__)
#endif
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER
#define NLIB_PTHREAD_nlib_cond_init
#define NLIB_PTHREAD_nlib_cond_signal
#define NLIB_PTHREAD_nlib_cond_broadcast
#define NLIB_PTHREAD_nlib_cond_wait
#define NLIB_PTHREAD_nlib_cond_destroy

#define NLIB_PTHREAD_nlib_thread_join
#define NLIB_PTHREAD_nlib_thread_detach
#define NLIB_PTHREAD_nlib_thread_equal
#define NLIB_PTHREAD_nlib_thread_self

#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#if 0 && __has_include( <os/lock.h> ) && (MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_12)
// os_unfair_lock backend (disabled); wrapper names follow the nlib_spinlock_* pattern
#define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
static __inline void nlib_spinlock_init(os_unfair_lock_t* lock) {
    *lock = OS_UNFAIR_LOCK_INIT;
}
static __inline void nlib_spinlock_lock(os_unfair_lock_t* lock) {
    os_unfair_lock_lock(*lock);
}
static __inline int nlib_spinlock_trylock(os_unfair_lock_t* lock) {
    return os_unfair_lock_trylock(*lock) ? 0 : EBUSY;
}
static __inline void nlib_spinlock_unlock(os_unfair_lock_t* lock) {
    os_unfair_lock_unlock(*lock);
}
#else
// OSSpinLock backend
#define NLIB_SPINLOCK_INITIALIZER (0)
static __inline void nlib_spinlock_lock(OSSpinLock* lock) {
    OSSpinLockLock(lock);
}
static __inline int nlib_spinlock_trylock(OSSpinLock* lock) {
    return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static __inline void nlib_spinlock_unlock(OSSpinLock* lock) {
    OSSpinLockUnlock(lock);
}
#endif
#endif
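/*
 * Usage sketch (illustrative only): the NLIB_*_INITIALIZER macros allow
 * static initialization with no init call.  Because the nlib_mutex
 * operations pass through to pthread (see the NLIB_PTHREAD_nlib_mutex_*
 * defines above), a plain pthread mutex works directly:
 *
 *   static pthread_mutex_t g_counter_lock = NLIB_MUTEX_INITIALIZER;
 *   static long g_counter;
 *
 *   static void counter_inc(void) {
 *       pthread_mutex_lock(&g_counter_lock);
 *       ++g_counter;
 *       pthread_mutex_unlock(&g_counter_lock);
 *   }
 */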
#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
#else
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
#  define NLIB_CXX11_NOEXCEPT
# endif
#endif

#define NLIB_TIMESPEC_HAS_NATIVE
#define NLIB_IOVEC_HAS_NATIVE

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif

#ifdef NLIB_RWLOCK_HAS_NATIVE
#ifndef _LIBCPP_VERSION
typedef pthread_rwlock_t nlib_rwlock NLIB_CAPABILITY("mutex");
#else
typedef pthread_rwlock_t nlib_rwlock;
#endif
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER
#define NLIB_PTHREAD_nlib_rwlock_init
#define NLIB_PTHREAD_nlib_rwlock_destroy
#define NLIB_PTHREAD_nlib_rwlock_tryrdlock
#define NLIB_PTHREAD_nlib_rwlock_trywrlock
#define NLIB_PTHREAD_nlib_rwlock_rdlock
#define NLIB_PTHREAD_nlib_rwlock_rdunlock
#define NLIB_PTHREAD_nlib_rwlock_wrlock
#define NLIB_PTHREAD_nlib_rwlock_wrunlock
#endif

#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
#define NLIB_PTHREAD_nlib_barrier_init
#define NLIB_PTHREAD_nlib_barrier_destroy
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif
#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif

#define NLIB_LIBC_nlib_memcmp
#define NLIB_LIBC_nlib_strlen
#define NLIB_LIBC_nlib_strnlen
#if defined(__STDC_LIB_EXT1__)
# define NLIB_LIBC_nlib_wcslen
# define NLIB_LIBC_nlib_wcsnlen
# define NLIB_LIBC_nlib_strncpy
# define NLIB_LIBC_nlib_strcpy
# define NLIB_LIBC_nlib_wcsncpy
# define NLIB_LIBC_nlib_wcscpy
#endif
#define NLIB_LIBC_nlib_strchr
#define NLIB_LIBC_nlib_strrchr

#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))
#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#if defined(NLIB_DOXYGEN)
// Documentation prototypes for the compare-exchange wrappers; the
// nlib_atomic_* names follow the pattern used throughout this header.
int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                   int32_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                   int64_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                    void* desired, int weak,
                                    int success_memorder, int failure_memorder);
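/*
 * Usage sketch (illustrative only; nlib_atomic_load32 assumed by analogy
 * with the prototypes above): a typical compare-exchange retry loop.  The
 * wrapper returns nonzero on success and refreshes *expected on failure,
 * mirroring __atomic_compare_exchange_n.
 *
 *   static void fetch_max(int32_t* target, int32_t v) {
 *       int32_t cur = nlib_atomic_load32(target, NLIB_ATOMIC_RELAXED);
 *       while (cur < v &&
 *              !nlib_atomic_compare_exchange32(target, &cur, v, 1,
 *                                              NLIB_ATOMIC_ACQ_REL,
 *                                              NLIB_ATOMIC_RELAXED)) {
 *           // cur now holds the latest value; retry until the max sticks
 *       }
 *   }
 */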
#else
#if defined(__has_feature) && __has_feature(thread_sanitizer)
#define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
#define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
#else
#define NLIB_TSAN_LOCK
#define NLIB_TSAN_UNLOCK
#endif
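/*
 * Under ThreadSanitizer every wrapper below serializes on one global
 * pthread mutex (nlib_tsan_lock, declared elsewhere), so the tool observes
 * an ordinary lock-protected access instead of a raw atomic.  In normal
 * builds the two macros expand to nothing and the wrappers compile down to
 * the bare __atomic builtins.
 */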
459 __atomic_store_n(ptr, val, memorder);
467 rval = __atomic_exchange_n(ptr, val, memorder);
473 int32_t desired,
int weak,
474 int success_memorder,
int failure_memorder) {
477 rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
478 success_memorder, failure_memorder);
487 rval = __atomic_add_fetch(ptr, val, memorder);
496 rval = __atomic_sub_fetch(ptr, val, memorder);
505 rval = __atomic_and_fetch(ptr, val, memorder);
514 rval = __atomic_xor_fetch(ptr, val, memorder);
523 rval = __atomic_or_fetch(ptr, val, memorder);
532 rval = __atomic_fetch_add(ptr, val, memorder);
541 rval = __atomic_fetch_sub(ptr, val, memorder);
550 rval = __atomic_fetch_and(ptr, val, memorder);
559 rval = __atomic_fetch_xor(ptr, val, memorder);
568 rval = __atomic_fetch_or(ptr, val, memorder);
576 rval = __atomic_load_n(ptr, memorder);
583 __atomic_store_n(ptr, val, memorder);
591 rval = __atomic_exchange_n(ptr, val, memorder);
static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}
605 int64_t desired,
int weak,
606 int success_memorder,
int failure_memorder) {
609 rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
610 success_memorder, failure_memorder);
619 rval = __atomic_add_fetch(ptr, val, memorder);
628 rval = __atomic_sub_fetch(ptr, val, memorder);
637 rval = __atomic_and_fetch(ptr, val, memorder);
646 rval = __atomic_xor_fetch(ptr, val, memorder);
655 rval = __atomic_or_fetch(ptr, val, memorder);
664 rval = __atomic_fetch_add(ptr, val, memorder);
673 rval = __atomic_fetch_sub(ptr, val, memorder);
682 rval = __atomic_fetch_and(ptr, val, memorder);
691 rval = __atomic_fetch_xor(ptr, val, memorder);
700 rval = __atomic_fetch_or(ptr, val, memorder);
708 rval = __atomic_load_n(ptr, memorder);
715 __atomic_store_n(ptr, val, memorder);
720 void* desired,
int weak,
721 int success_memorder,
int failure_memorder) {
724 rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
725 success_memorder, failure_memorder);
731 __atomic_thread_fence(memorder);
735 #define NLIB_ATOMIC_RELAXED 0 736 #define NLIB_ATOMIC_ACQUIRE 1 737 #define NLIB_ATOMIC_RELEASE 2 738 #define NLIB_ATOMIC_ACQ_REL 3 739 #define NLIB_ATOMIC_SEQ_CST 7 742 int32_t rval = *(
volatile int32_t*)ptr;
744 #if !defined(__i386__) && !defined(__x86_64__) 754 __sync_synchronize();
static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    __sync_lock_test_and_set(ptr, val);
}
static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    __sync_synchronize();  // __sync_lock_test_and_set is only an acquire barrier
    return __sync_lock_test_and_set(ptr, val);
}
static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder,
                                                   int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;
    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}
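/*
 * Usage sketch (illustrative only; flag is hypothetical): how the fallback
 * compare-exchange behaves.  On failure the caller's expected value is
 * refreshed, so a retry loop can simply test the return value.
 *
 *   int32_t expected = 0;
 *   // succeeds only if *flag was 0; otherwise expected becomes *flag
 *   if (!nlib_atomic_compare_exchange32(flag, &expected, 1, 0,
 *                                       NLIB_ATOMIC_ACQ_REL,
 *                                       NLIB_ATOMIC_ACQUIRE)) {
 *       // lost the race; expected now holds the current value of *flag
 *   }
 */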
static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}
// 64-bit atomics
static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = *(volatile int64_t*)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
    __sync_synchronize();
#endif
    (void)memorder;
    return rval;
}
static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    __sync_lock_test_and_set(ptr, val);
}
static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    __sync_synchronize();
    return __sync_lock_test_and_set(ptr, val);
}
// pointer exchange (name assumed; kept in its original position)
static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    (void)memorder;
    __sync_synchronize();
    return __sync_lock_test_and_set(ptr, val);
}
static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder,
                                                   int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;
    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}
static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}
// pointer atomics
static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile *)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
    __sync_synchronize();
#endif
    (void)memorder;
    return rval;
}
// store via exchange; the previous value is discarded (name assumed)
static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    void* tmp = __sync_lock_test_and_set(ptr, val);
    (void)tmp;
    (void)memorder;
}
static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder,
                                                    int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;
    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}
#endif  // __ATOMIC_* vs __sync fallback

#endif  // defined(__linux__) || defined(__FreeBSD__) || defined(__CYGWIN__) || Apple
#endif  // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_