#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif
#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#if defined(__APPLE__) && defined(__MACH__)
#include <libkern/OSAtomic.h>
#if !defined(__GNUC__) && !defined(__clang__)
#define NLIB_HAS_STDHEADER_STDINT
#define NLIB_HAS_STDHEADER_INTTYPES
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif
#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
# if __has_attribute(alloc_size)
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
# define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
# define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
# define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
# define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
# define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
# define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#define NLIB_DEPRECATED __attribute__((deprecated))
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated(msg)))
#define _Printf_format_string_
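/*
 * Usage sketch (illustrative only; nlib_alloc_example is a hypothetical
 * declaration, not part of nlib): the annotation macros above are meant to
 * decorate API declarations so GCC/Clang can warn on ignored return values
 * and reason about allocation size, e.g.
 *
 *   NLIB_CHECK_RESULT NLIB_ATTRIBUTE_MALLOC NLIB_ATTRIBUTE_ALLOC_SIZE1(1)
 *   void* nlib_alloc_example(size_t size);
 */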
#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
# define NLIB_LITTLE_ENDIAN
# else
# undef NLIB_LITTLE_ENDIAN
# endif
#endif
#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#endif
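/*
 * Sketch of the intended release/acquire pairing (data and flag are
 * illustrative variables, and in real code the flag itself must be an
 * atomic or volatile object):
 *
 *   data = 42;
 *   NLIB_MEMORY_ORDER_RELEASE;   // writer: publish data before setting flag
 *   flag = 1;
 *
 *   while (flag == 0) {}
 *   NLIB_MEMORY_ORDER_ACQUIRE;   // reader: fence before consuming data
 *   use(data);
 */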
#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                 PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                 PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#if defined(__APPLE__)
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER
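/*
 * Sketch (variable names are illustrative): on this platform the
 * NLIB_*_INITIALIZER macros expand to pthread static initializers, so they
 * are intended for file-scope initialization such as
 *
 *   static pthread_mutex_t g_lock = NLIB_MUTEX_INITIALIZER;
 *   static pthread_cond_t  g_cond = NLIB_COND_INITIALIZER;
 */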
static inline __attribute__((always_inline)) int nlib_clz64(uint64_t x) {
    return x != 0 ? __builtin_clzll(x) : 64;
}
static inline __attribute__((always_inline)) int nlib_ctz64(uint64_t x) {
    return x != 0 ? __builtin_ctzll(x) : 64;
}
static inline __attribute__((always_inline)) int nlib_clz(uint32_t x) {
    return x != 0 ? __builtin_clz(x) : 32;
}
static inline __attribute__((always_inline)) int nlib_ctz(uint32_t x) {
    return x != 0 ? __builtin_ctz(x) : 32;
}
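/*
 * Example values (these follow directly from the definitions above):
 *   nlib_clz(1) == 31, nlib_ctz(8) == 3, nlib_clz64(1) == 63.
 * Unlike the raw __builtin_clz/__builtin_ctz, which are undefined for 0,
 * these wrappers return the full bit width (32 or 64) for a zero input.
 */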
#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#define NLIB_SPINLOCK_INITIALIZER (0)
    OSSpinLockLock(lock);
    return OSSpinLockTry(lock) ? 0 : EBUSY;
    OSSpinLockUnlock(lock);
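/*
 * Sketch: on Darwin the native spinlock type is OSSpinLock (from
 * <libkern/OSAtomic.h>, included above), and NLIB_SPINLOCK_INITIALIZER (0)
 * matches OS_SPINLOCK_INIT, so a lock can be declared and used as
 *
 *   OSSpinLock lock = NLIB_SPINLOCK_INITIALIZER;
 *   OSSpinLockLock(&lock);
 *   ...critical section...
 *   OSSpinLockUnlock(&lock);
 *
 * OSSpinLockTry() returns false (mapped to EBUSY above) when the lock is
 * already held.
 */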
#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
# define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
# if __has_feature(cxx_noexcept)
# define NLIB_CXX11_NOEXCEPT
# define NLIB_NOEXCEPT
# if 0 // __has_attribute(capability)
# define NLIB_LOCKABLE __attribute__((capability("mutex")))
# define NLIB_LOCK_FUNC(...) __attribute__((acquire_capability(__VA_ARGS__)))
# define NLIB_SHARED_LOCK_FUNC(...) __attribute__((acquire_shared_capability(__VA_ARGS__)))
# define NLIB_UNLOCK_FUNC(...) __attribute__((release_capability(__VA_ARGS__)))
# define NLIB_SHARED_UNLOCK_FUNC(...) __attribute__((release_shared_capability(__VA_ARGS__)))
# define NLIB_TRYLOCK_FUNC(...) __attribute__((try_acquire_capability(__VA_ARGS__)))
# define NLIB_SHARED_TRYLOCK_FUNC(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__)))
# define NLIB_GUARDED_BY(x) __attribute__((guarded_by(x)))
# define NLIB_PT_GUARDED_BY(x) __attribute__((pt_guarded_by(x)))
# define NLIB_LOCK_REQUIRED(...) __attribute__((requires_capability(__VA_ARGS__)))
# define NLIB_LOCK_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__)))
# define NLIB_SHARED_LOCK_REQUIRED(...) __attribute__((requires_shared_capability(__VA_ARGS__)))
# define NLIB_SCOPED_LOCKABLE __attribute__((scoped_lockable))
# define NLIB_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))
# else
# define NLIB_LOCKABLE __attribute__((lockable))
# define NLIB_LOCK_FUNC(...) __attribute__((exclusive_lock_function(__VA_ARGS__)))
# define NLIB_SHARED_LOCK_FUNC(...) __attribute__((shared_lock_function(__VA_ARGS__)))
# define NLIB_UNLOCK_FUNC(...) __attribute__((unlock_function(__VA_ARGS__)))
# define NLIB_TRYLOCK_FUNC(...) __attribute__((exclusive_trylock_function(__VA_ARGS__)))
# define NLIB_SHARED_TRYLOCK_FUNC(...) __attribute__((shared_trylock_function(__VA_ARGS__)))
# define NLIB_GUARDED_BY(x) __attribute__((guarded_by(x)))
# define NLIB_PT_GUARDED_BY(x) __attribute__((pt_guarded_by(x)))
# define NLIB_LOCK_REQUIRED(...) __attribute__((exclusive_locks_required(__VA_ARGS__)))
# define NLIB_LOCK_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__)))
# define NLIB_SHARED_LOCK_REQUIRED(...) __attribute__((shared_locks_required(__VA_ARGS__)))
# define NLIB_SCOPED_LOCKABLE __attribute__((scoped_lockable))
# define NLIB_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))
# endif
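/*
 * Sketch (Counter and Mutex are illustrative, not part of nlib): these macros
 * wrap Clang's thread-safety-analysis attributes, so C++ code built with
 * -Wthread-safety can annotate shared state, e.g.
 *
 *   class Counter {
 *       Mutex mtx_;
 *       int value_ NLIB_GUARDED_BY(mtx_);
 *   };
 *
 * Clang then warns when value_ is accessed without mtx_ held, assuming the
 * Mutex type itself is annotated with NLIB_LOCKABLE / NLIB_LOCK_FUNC et al.
 */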
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
# define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
# define NLIB_CXX11_NOEXCEPT
# endif
#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER
#endif
#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
#endif
#define NLIB_THREAD_ATTR_HAS_NATIVE
#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif
#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif
#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST
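/*
 * Sketch (counter is an illustrative variable): when the __atomic built-ins
 * are available, the NLIB_ATOMIC_* constants are simply the compiler's own
 * memory orders and can be passed straight to those built-ins, e.g.
 *
 *   int32_t v = __atomic_load_n(&counter, NLIB_ATOMIC_ACQUIRE);
 *   __atomic_fetch_add(&counter, 1, NLIB_ATOMIC_ACQ_REL);
 */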
        int32_t desired, int weak,
        int success_memorder, int failure_memorder);
        int64_t desired, int weak,
        int success_memorder, int failure_memorder);
        void* desired, int weak,
        int success_memorder, int failure_memorder);
    return __atomic_load_n(ptr, memorder);
    __atomic_store_n(ptr, val, memorder);
    return __atomic_exchange_n(ptr, val, memorder);
        int32_t desired, int weak,
        int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    return __atomic_add_fetch(ptr, val, memorder);
    return __atomic_sub_fetch(ptr, val, memorder);
    return __atomic_and_fetch(ptr, val, memorder);
    return __atomic_xor_fetch(ptr, val, memorder);
    return __atomic_or_fetch(ptr, val, memorder);
    return __atomic_fetch_add(ptr, val, memorder);
    return __atomic_fetch_sub(ptr, val, memorder);
    return __atomic_fetch_and(ptr, val, memorder);
    return __atomic_fetch_xor(ptr, val, memorder);
    return __atomic_fetch_or(ptr, val, memorder);
    return __atomic_load_n(ptr, memorder);
    __atomic_store_n(ptr, val, memorder);
    return __atomic_exchange_n(ptr, val, memorder);
        int64_t desired, int weak,
        int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    return __atomic_add_fetch(ptr, val, memorder);
    return __atomic_sub_fetch(ptr, val, memorder);
    return __atomic_and_fetch(ptr, val, memorder);
    return __atomic_xor_fetch(ptr, val, memorder);
    return __atomic_or_fetch(ptr, val, memorder);
    return __atomic_fetch_add(ptr, val, memorder);
    return __atomic_fetch_sub(ptr, val, memorder);
    return __atomic_fetch_and(ptr, val, memorder);
    return __atomic_fetch_xor(ptr, val, memorder);
    return __atomic_fetch_or(ptr, val, memorder);
    return __atomic_load_n(ptr, memorder);
    __atomic_store_n(ptr, val, memorder);
        void* desired, int weak,
        int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    __atomic_thread_fence(memorder);
#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7
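/*
 * Note (interpretation of the values above): in this fallback path, where the
 * __atomic built-ins are unavailable, the orders appear to be encoded as flag
 * bits, so NLIB_ATOMIC_ACQ_REL == (NLIB_ATOMIC_ACQUIRE | NLIB_ATOMIC_RELEASE)
 * and NLIB_ATOMIC_SEQ_CST sets both of those bits as well. The __sync-based
 * wrappers below fall back to full barriers (__sync_synchronize) and, as the
 * (void) casts show, ignore the success/failure orders on compare-exchange.
 */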
    int32_t rval = *(volatile int32_t*)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
    __sync_synchronize();
    __sync_lock_test_and_set(ptr, val);
    __sync_synchronize();
    return __sync_lock_test_and_set(ptr, val);
        int32_t desired, int weak,
        int success_memorder, int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    (void)success_memorder;
    (void)failure_memorder;
    return __sync_add_and_fetch(ptr, val);
    return __sync_sub_and_fetch(ptr, val);
    return __sync_and_and_fetch(ptr, val);
    return __sync_xor_and_fetch(ptr, val);
    return __sync_or_and_fetch(ptr, val);
    return __sync_fetch_and_add(ptr, val);
    return __sync_fetch_and_sub(ptr, val);
    return __sync_fetch_and_and(ptr, val);
    return __sync_fetch_and_xor(ptr, val);
    return __sync_fetch_and_or(ptr, val);
    int64_t rval = *(volatile int64_t*)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
    __sync_synchronize();
    __sync_lock_test_and_set(ptr, val);
    __sync_synchronize();
    return __sync_lock_test_and_set(ptr, val);
        int64_t desired, int weak,
        int success_memorder, int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    (void)success_memorder;
    (void)failure_memorder;
    return __sync_add_and_fetch(ptr, val);
    return __sync_sub_and_fetch(ptr, val);
    return __sync_and_and_fetch(ptr, val);
    return __sync_xor_and_fetch(ptr, val);
    return __sync_or_and_fetch(ptr, val);
    return __sync_fetch_and_add(ptr, val);
    return __sync_fetch_and_sub(ptr, val);
    return __sync_fetch_and_and(ptr, val);
    return __sync_fetch_and_xor(ptr, val);
    return __sync_fetch_and_or(ptr, val);
    void* rval = *(void* volatile *)ptr;
#if !defined(__i386__) && !defined(__x86_64__)
    __sync_synchronize();
    void* tmp = __sync_lock_test_and_set(ptr, val);
        void* desired, int weak,
        int success_memorder, int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    (void)success_memorder;
    (void)failure_memorder;
    __sync_synchronize();
#endif // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_