nlib
Platform_unix.h

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))

#if defined(__APPLE__) && defined(__MACH__)
#include <libkern/OSAtomic.h>
#include <errno.h>
#endif

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_UNIX
# define NLIB_UNIX
#endif

// For now, only gcc and clang are supported.
#if !defined(__GNUC__) && !defined(__clang__)
# error unsupported compiler: gcc or clang is required
#endif

31 
32 #define NLIB_HAS_STDHEADER_STDINT
33 #define NLIB_HAS_STDHEADER_INTTYPES
34 
35 #include <pthread.h> // for PTHREAD_MUTEX_INITIALIZER, ....
36 #include <semaphore.h> // for sem_t
37 #include <sys/types.h> // for pthread_mutex_t, ....
38 #include <fcntl.h> // NOLINT
39 #include <sys/socket.h>
40 #include <netinet/in.h>
41 #include <arpa/inet.h>
42 #include <netdb.h>
43 #include <stdint.h>
44 
45 #if defined(__i386__) || defined(__x86_64__)
46 # include <x86intrin.h>
47 #endif
48 
#ifndef __CYGWIN__
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))

#ifdef __clang__
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif

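// Usage sketch (illustrative only, not part of the original header): how a
// custom allocator declaration might combine the attribute macros above.
// The function name my_aligned_alloc is hypothetical.
//
//   NLIB_CHECK_RESULT NLIB_ATTRIBUTE_MALLOC
//   NLIB_ATTRIBUTE_ALLOC_ALIGN(1) NLIB_ATTRIBUTE_ALLOC_SIZE1(2)
//   void* my_aligned_alloc(size_t algn, size_t size);
//
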
#define NLIB_DEPRECATED __attribute__((deprecated))
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated(msg)))
#define _Printf_format_string_

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif
#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif

#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#  error Sorry
# endif
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#endif

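// Usage sketch (illustrative only): the classic release/acquire pairing these
// fences enable. g_data and g_ready are hypothetical globals; in real code
// g_ready would itself be accessed atomically (e.g. via the nlib_atomic_*
// functions below).
//
//   /* producer */                    /* consumer */
//   g_data = 42;                      while (g_ready == 0) { /* spin */ }
//   NLIB_MEMORY_ORDER_RELEASE;        NLIB_MEMORY_ORDER_ACQUIRE;
//   g_ready = 1;                      consume(g_data);
//
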
typedef pthread_mutex_t nlib_mutex;

#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# ifdef __FreeBSD__
// https://stackoverflow.com/questions/10369606/constexpr-pointer-value
// ((pthread_mutex_t)1) in FreeBSD
#  define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
#  define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
// FreeBSD has no static initializer for recursive mutexes;
// nlib initializes the recursive mutex when its value is 255.
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#else
# error Sorry
#endif

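// Usage sketch (illustrative only): an nlib_mutex statically initialized with
// NLIB_MUTEX_INITIALIZER. The locking calls shown are the plain pthread API,
// since nlib_mutex is pthread_mutex_t on these platforms; g_lock, g_counter,
// and touch_counter are hypothetical names.
//
//   static nlib_mutex g_lock = NLIB_MUTEX_INITIALIZER;
//   static int g_counter;
//
//   static void touch_counter(void) {
//       pthread_mutex_lock(&g_lock);
//       ++g_counter;
//       pthread_mutex_unlock(&g_lock);
//   }
//
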
#if defined(__APPLE__)
typedef int nlib_semaphore;
#else
typedef sem_t nlib_semaphore;
#endif

typedef pthread_cond_t nlib_cond;
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER

typedef pthread_t nlib_thread;

// nlib_clz64(INT64_MIN) -> 0, nlib_clz64(1) -> 63
static inline __attribute__((always_inline)) int nlib_clz64(uint64_t x) {
    return x != 0 ? __builtin_clzll(x) : 64;
}

// nlib_ctz64(INT64_MIN) -> 63, nlib_ctz64(1) -> 0
static inline __attribute__((always_inline)) int nlib_ctz64(uint64_t x) {
    return x != 0 ? __builtin_ctzll(x) : 64;
}

// nlib_clz(0x80000000) -> 0, nlib_clz(1) -> 31
static inline __attribute__((always_inline)) int nlib_clz(uint32_t x) {
    return x != 0 ? __builtin_clz(x) : 32;
}

// nlib_ctz(0x80000000) -> 31, nlib_ctz(1) -> 0
static inline __attribute__((always_inline)) int nlib_ctz(uint32_t x) {
    return x != 0 ? __builtin_ctz(x) : 32;
}

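// Usage sketch (illustrative only): floor(log2(x)) for nonzero x via
// nlib_clz, e.g. floor_log2(0x80000000) == 31 and floor_log2(1) == 0.
// floor_log2 is a hypothetical helper, not part of this header.
//
//   static inline int floor_log2(uint32_t x) {
//       return 31 - nlib_clz(x);  // caller guarantees x != 0
//   }
//
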
#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
typedef OSSpinLock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER (0)
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = 0;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    OSSpinLockLock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    OSSpinLockUnlock(lock);
}
#endif

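// Usage sketch (illustrative only, Apple targets): guarding a tiny critical
// section with the native spinlock wrappers above. g_spin is a hypothetical
// name.
//
//   static nlib_spinlock g_spin = NLIB_SPINLOCK_INITIALIZER;
//
//   if (nlib_spinlock_trylock(&g_spin) == 0) {
//       /* ... short critical section ... */
//       nlib_spinlock_unlock(&g_spin);
//   }
//
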
#ifdef __cplusplus
}
#endif

#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
# if 0  // __has_attribute(capability)
#  define NLIB_LOCKABLE __attribute__((capability("mutex")))
#  define NLIB_LOCK_FUNC(...) __attribute__((acquire_capability(__VA_ARGS__)))
#  define NLIB_SHARED_LOCK_FUNC(...) __attribute__((acquire_shared_capability(__VA_ARGS__)))
#  define NLIB_UNLOCK_FUNC(...) __attribute__((release_capability(__VA_ARGS__)))
#  define NLIB_SHARED_UNLOCK_FUNC(...) __attribute__((release_shared_capability(__VA_ARGS__)))
#  define NLIB_TRYLOCK_FUNC(...) __attribute__((try_acquire_capability(__VA_ARGS__)))
#  define NLIB_SHARED_TRYLOCK_FUNC(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__)))
#  define NLIB_GUARDED_BY(x) __attribute__((guarded_by(x)))
#  define NLIB_PT_GUARDED_BY(x) __attribute__((pt_guarded_by(x)))
#  define NLIB_LOCK_REQUIRED(...) __attribute__((requires_capability(__VA_ARGS__)))
#  define NLIB_LOCK_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__)))
#  define NLIB_SHARED_LOCK_REQUIRED(...) __attribute__((requires_shared_capability(__VA_ARGS__)))
#  define NLIB_SCOPED_LOCKABLE __attribute__((scoped_lockable))
#  define NLIB_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))
# else
#  define NLIB_LOCKABLE __attribute__((lockable))
#  define NLIB_LOCK_FUNC(...) __attribute__((exclusive_lock_function(__VA_ARGS__)))
#  define NLIB_SHARED_LOCK_FUNC(...) __attribute__((shared_lock_function(__VA_ARGS__)))
#  define NLIB_UNLOCK_FUNC(...) __attribute__((unlock_function(__VA_ARGS__)))
#  define NLIB_TRYLOCK_FUNC(...) __attribute__((exclusive_trylock_function(__VA_ARGS__)))
#  define NLIB_SHARED_TRYLOCK_FUNC(...) __attribute__((shared_trylock_function(__VA_ARGS__)))
#  define NLIB_GUARDED_BY(x) __attribute__((guarded_by(x)))
#  define NLIB_PT_GUARDED_BY(x) __attribute__((pt_guarded_by(x)))
#  define NLIB_LOCK_REQUIRED(...) __attribute__((exclusive_locks_required(__VA_ARGS__)))
#  define NLIB_LOCK_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__)))
#  define NLIB_SHARED_LOCK_REQUIRED(...) __attribute__((shared_locks_required(__VA_ARGS__)))
#  define NLIB_SCOPED_LOCKABLE __attribute__((scoped_lockable))
#  define NLIB_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))
# endif
#else
// __GNUC__
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
#  define NLIB_CXX11_NOEXCEPT
# endif
// # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
// #  error Sorry, glibc is old.
// # endif
#endif

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
typedef pthread_rwlock_t nlib_rwlock;
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER
#endif

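// Usage sketch (illustrative only): a statically initialized nlib_rwlock,
// used through the plain pthread API since it is pthread_rwlock_t here.
// g_rwlock is a hypothetical name.
//
//   #ifdef NLIB_RWLOCK_HAS_NATIVE
//   static nlib_rwlock g_rwlock = NLIB_RWLOCK_INITIALIZER;
//
//   /* many readers may hold the lock concurrently */
//   pthread_rwlock_rdlock(&g_rwlock);
//   /* ... read shared state ... */
//   pthread_rwlock_unlock(&g_rwlock);
//   #endif
//
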
#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
typedef pthread_barrier_t nlib_barrier;
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif

#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif

#ifdef __cplusplus
extern "C" {
#endif

#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#ifdef NLIB_DOXYGEN
int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                   int32_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);

int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                   int64_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);

void* nlib_atomic_loadptr(void* const* ptr, int memorder);
void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                    void* desired, int weak,
                                    int success_memorder, int failure_memorder);
void nlib_atomic_thread_fence(int memorder);
#endif

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    return __atomic_load_n(ptr, memorder);
}

static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
    __atomic_store_n(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    return __atomic_exchange_n(ptr, val, memorder);
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
}

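// Usage sketch (illustrative only): a lock-free saturating increment built on
// nlib_atomic_compare_exchange32. saturating_inc32 is a hypothetical helper,
// not part of this header.
//
//   static int32_t saturating_inc32(int32_t* p) {
//       int32_t cur = nlib_atomic_load32(p, NLIB_ATOMIC_RELAXED);
//       while (cur != INT32_MAX &&
//              !nlib_atomic_compare_exchange32(p, &cur, cur + 1, 1,
//                                              NLIB_ATOMIC_ACQ_REL,
//                                              NLIB_ATOMIC_RELAXED)) {
//           /* on failure, cur holds the latest value; retry */
//       }
//       return cur;  // value observed before the increment
//   }
//
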
static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_add_fetch(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_sub_fetch(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_and_fetch(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_xor_fetch(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    return __atomic_or_fetch(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_fetch_add(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_fetch_sub(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_fetch_and(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    return __atomic_fetch_xor(ptr, val, memorder);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    return __atomic_fetch_or(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    return __atomic_load_n(ptr, memorder);
}

static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
    __atomic_store_n(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    return __atomic_exchange_n(ptr, val, memorder);
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_add_fetch(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_sub_fetch(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_and_fetch(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_xor_fetch(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    return __atomic_or_fetch(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_fetch_add(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_fetch_sub(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_fetch_and(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    return __atomic_fetch_xor(ptr, val, memorder);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    return __atomic_fetch_or(ptr, val, memorder);
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    return __atomic_load_n(ptr, memorder);
}

static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    __atomic_store_n(ptr, val, memorder);
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    return __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    __atomic_thread_fence(memorder);
}

#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)  // x86 loads already have acquire semantics
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);  // the atomic swap performs the store
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = *(volatile int64_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)  // x86 loads already have acquire semantics
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);  // the atomic swap performs the store
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile *)ptr;  // NOLINT
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)  // x86 loads already have acquire semantics
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    void* tmp = __sync_lock_test_and_set(ptr, val);  // the atomic swap performs the store
    (void)tmp;
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    switch (memorder) {
    case NLIB_ATOMIC_RELAXED:
        break;
    case NLIB_ATOMIC_ACQUIRE:
        NLIB_MEMORY_ORDER_ACQUIRE;
        break;
    case NLIB_ATOMIC_RELEASE:
        NLIB_MEMORY_ORDER_RELEASE;
        break;
    case NLIB_ATOMIC_ACQ_REL:
        NLIB_MEMORY_ORDER_ACQ_REL;
        break;
    default:
        __sync_synchronize();
        break;
    }
}
#endif

#ifdef __cplusplus
}
#endif

#endif
#endif  // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_