Platform_unix.h
/*---------------------------------------------------------------------------*

 Project: CrossRoad
 Copyright (C)2012-2016 Nintendo. All rights reserved.

 These coded instructions, statements, and computer programs contain
 proprietary information of Nintendo of America Inc. and/or Nintendo
 Company Ltd., and are protected by Federal copyright law. They may
 not be disclosed to third parties or copied or duplicated in any form,
 in whole or in part, without the prior written consent of Nintendo.

 *---------------------------------------------------------------------------*/

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#define INCLUDE_NN_NLIB_PLATFORM_UNIX_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif

#if defined(__linux__) || \
    defined(__FreeBSD__) || \
    defined(__CYGWIN__) || \
    (defined(__APPLE__) && defined(__MACH__))
#ifdef __cplusplus
// http://cpprefjp.github.io/reference/cstdint.html
// Older libc may require __STDC_LIMIT_MACROS and __STDC_CONSTANT_MACROS to be defined.
#ifndef __STDC_LIMIT_MACROS
#warning __STDC_LIMIT_MACROS not defined, compilation may fail
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#warning __STDC_CONSTANT_MACROS not defined, compilation may fail
#define __STDC_CONSTANT_MACROS
#endif
#endif
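
/* Example: with an old libc, define both macros before the first <stdint.h>
 * include, or pass them on the compiler command line:
 *
 *     // g++ -D__STDC_LIMIT_MACROS -D__STDC_CONSTANT_MACROS ...
 *     #define __STDC_LIMIT_MACROS
 *     #define __STDC_CONSTANT_MACROS
 *     #include <stdint.h>  // INT32_MAX and INT64_C(...) are now visible
 */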

#if defined(__APPLE__) && defined(__MACH__)
#define _DARWIN_UNLIMITED_SELECT
#include <libkern/OSAtomic.h>
#include <errno.h>
#if __has_include( <os/lock.h> )
#include <os/lock.h>
#endif
#endif

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_UNIX
# define NLIB_UNIX
#endif

// For now, only gcc and clang are supported
#if !defined(__GNUC__) && !defined(__clang__)
# error only gcc and clang are supported
#endif

#define NLIB_HAS_STDHEADER_INTTYPES

#if !defined(__FreeBSD__) && !defined(__APPLE__)
// checking __GNU_LIBRARY__, __GLIBC__, __GLIBC_MINOR__ to detect glibc
#include <features.h>
#endif
#include <pthread.h>     // for PTHREAD_MUTEX_INITIALIZER, ....
#include <semaphore.h>   // for sem_t
#include <sys/types.h>   // for pthread_mutex_t, ....
#include <sys/uio.h>     // struct iovec
#include <fcntl.h>       // NOLINT
#include <sys/socket.h>
#include <netinet/tcp.h> // TCP_FASTOPEN
#include <poll.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <stdint.h>

#if defined(__i386__) || defined(__x86_64__)
# include <x86intrin.h>
#endif

#ifndef __CYGWIN__
# define NLIB_VIS_HIDDEN __attribute__((visibility("hidden")))
# define NLIB_VIS_PUBLIC __attribute__((visibility("default")))
# define NLIB_WEAKSYMBOL __attribute__((weak))
#else
# define NLIB_VIS_HIDDEN
# define NLIB_VIS_PUBLIC
# define NLIB_WEAKSYMBOL
#endif

#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))

#ifdef __clang__
# if __has_attribute(alloc_size)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#  define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
# endif
# if __has_attribute(alloc_align)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
# endif
# if __has_attribute(assume_aligned)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#else
# define NLIB_ATTRIBUTE_ALLOC_SIZE1(n) __attribute__((alloc_size(n)))
# define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1) __attribute__((alloc_size(n0, n1)))
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn) __attribute__((alloc_align(algn)))
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n) __attribute__((assume_aligned(n)))
# else
#  define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#  define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
# endif
#endif
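
/* Usage sketch (hypothetical allocator, not part of nlib): these macros
 * annotate declarations so gcc/clang can diagnose misuse and optimize:
 *
 *     NLIB_CHECK_RESULT NLIB_ATTRIBUTE_MALLOC
 *     NLIB_ATTRIBUTE_ALLOC_SIZE1(1) NLIB_ATTRIBUTE_ASSUME_ALIGNED(16)
 *     void* my_alloc16(size_t size);  // result is 16-byte aligned
 */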

#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif

#if defined(__LITTLE_ENDIAN__) || defined(__LITTLE_ENDIAN)
# define NLIB_LITTLE_ENDIAN
#elif defined(__BIG_ENDIAN__) || defined(__BIG_ENDIAN)
# undef NLIB_LITTLE_ENDIAN
#else
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#  define NLIB_LITTLE_ENDIAN
# else
#  undef NLIB_LITTLE_ENDIAN
# endif
#endif
#if defined(__x86_64__) || defined(__aarch64__)
# define NLIB_64BIT
#endif
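
/* Example: branching on the detected byte order (a sketch; to_le32 is not
 * part of nlib):
 *
 *     static inline uint32_t to_le32(uint32_t v) {
 *     #ifdef NLIB_LITTLE_ENDIAN
 *         return v;                     // already little-endian
 *     #else
 *         return __builtin_bswap32(v);  // swap on big-endian targets
 *     #endif
 *     }
 */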

#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7
# if !defined(__i386__) && !defined(__x86_64__)
#  error Sorry
# endif
#define NLIB_MEMORY_ORDER_RELEASE __asm__ __volatile__("sfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQUIRE __asm__ __volatile__("lfence": : :"memory")
#define NLIB_MEMORY_ORDER_ACQ_REL __asm__ __volatile__("mfence": : :"memory")
#define NLIB_MEMORY_ORDER_SEQ_CST __sync_synchronize()
#else
#define NLIB_MEMORY_ORDER_RELEASE __atomic_thread_fence(__ATOMIC_RELEASE)
#define NLIB_MEMORY_ORDER_ACQUIRE __atomic_thread_fence(__ATOMIC_ACQUIRE)
#define NLIB_MEMORY_ORDER_ACQ_REL __atomic_thread_fence(__ATOMIC_ACQ_REL)
#define NLIB_MEMORY_ORDER_SEQ_CST __atomic_thread_fence(__ATOMIC_SEQ_CST)
#endif
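
/* Sketch of a release/acquire pairing built from these fences (pre-C11
 * style, assuming volatile int g_flag and int g_data shared by two threads):
 *
 *     // producer                        // consumer
 *     g_data = 42;                       while (!g_flag) {}
 *     NLIB_MEMORY_ORDER_RELEASE;         NLIB_MEMORY_ORDER_ACQUIRE;
 *     g_flag = 1;                        use(g_data);  // observes 42
 */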

typedef pthread_key_t nlib_tls;
#define NLIB_PTHREAD_nlib_tls_alloc
#define NLIB_PTHREAD_nlib_tls_free
#define NLIB_PTHREAD_nlib_tls_setvalue
#define NLIB_PTHREAD_nlib_tls_getvalue

#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_mutex_t nlib_mutex;

#ifdef PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# ifdef __FreeBSD__
// https://stackoverflow.com/questions/10369606/constexpr-pointer-value
// ((pthread_mutex_t)1) in FreeBSD
#  define NLIB_MUTEX_INITIALIZER (__builtin_constant_p(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP) ? \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP : \
                                  PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP)
# else
#  define NLIB_MUTEX_INITIALIZER PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
# endif
#else
# define NLIB_PTHREAD_nlib_mutex_init
# define NLIB_MUTEX_INITIALIZER PTHREAD_MUTEX_INITIALIZER
#endif

#ifndef __FreeBSD__
#define NLIB_PTHREAD_nlib_mutex_lock
#define NLIB_PTHREAD_nlib_mutex_unlock
#define NLIB_PTHREAD_nlib_mutex_trylock
#define NLIB_PTHREAD_nlib_mutex_destroy
#endif
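
/* Usage sketch: nlib_mutex is pthread_mutex_t on this platform, so a mutex
 * can be statically initialized and used with the pthread calls directly:
 *
 *     static nlib_mutex g_lock = NLIB_MUTEX_INITIALIZER;
 *
 *     void touch_shared(void) {
 *         pthread_mutex_lock(&g_lock);
 *         // ... critical section ...
 *         pthread_mutex_unlock(&g_lock);
 *     }
 */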

#if defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP
#elif defined(PTHREAD_RECURSIVE_MUTEX_INITIALIZER)
# define NLIB_RECURSIVE_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER PTHREAD_RECURSIVE_MUTEX_INITIALIZER
#elif defined(__FreeBSD__)
// FreeBSD has no static initializer for recursive mutexes;
// nlib initializes the mutex lazily when its value is 255.
# define NLIB_RECURSIVE_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                           (pthread_mutex_t)255 : (pthread_mutex_t)255)
# define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER (__builtin_constant_p((pthread_mutex_t)255) ? \
                                                 (pthread_mutex_t)255 : (pthread_mutex_t)255)
#elif defined(NLIB_ALPINE)
// hacked...
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {{{1}}}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {{{1}}}
#else
# error Sorry
#endif

#if defined(__APPLE__)
typedef int nlib_semaphore;
#else
typedef sem_t nlib_semaphore;
#endif

typedef pthread_cond_t nlib_cond;
#define NLIB_COND_INITIALIZER PTHREAD_COND_INITIALIZER

#define NLIB_PTHREAD_nlib_cond_init
#define NLIB_PTHREAD_nlib_cond_signal
#define NLIB_PTHREAD_nlib_cond_broadcast
#define NLIB_PTHREAD_nlib_cond_wait
#define NLIB_PTHREAD_nlib_cond_destroy

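/* Usage sketch: nlib_cond is pthread_cond_t, so the usual predicate loop
 * applies (illustrative; g_ready is the shared condition):
 *
 *     static nlib_mutex g_m = NLIB_MUTEX_INITIALIZER;
 *     static nlib_cond g_cv = NLIB_COND_INITIALIZER;
 *     static int g_ready;
 *
 *     // waiter:
 *     pthread_mutex_lock(&g_m);
 *     while (!g_ready)
 *         pthread_cond_wait(&g_cv, &g_m);  // rechecks after each wakeup
 *     pthread_mutex_unlock(&g_m);
 *
 *     // signaler:
 *     pthread_mutex_lock(&g_m);
 *     g_ready = 1;
 *     pthread_cond_signal(&g_cv);
 *     pthread_mutex_unlock(&g_m);
 */
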
typedef pthread_t nlib_thread;

#define NLIB_PTHREAD_nlib_thread_join
#define NLIB_PTHREAD_nlib_thread_detach
#define NLIB_PTHREAD_nlib_thread_equal
#define NLIB_PTHREAD_nlib_thread_self

#if defined(__APPLE__)
#define NLIB_SPINLOCK_HAS_NATIVE
#if 0 && __has_include( <os/lock.h> ) && (MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_12)
typedef os_unfair_lock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER OS_UNFAIR_LOCK_INIT
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = (nlib_spinlock)OS_UNFAIR_LOCK_INIT;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    os_unfair_lock_lock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return os_unfair_lock_trylock(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    os_unfair_lock_unlock(lock);
}
#else
typedef OSSpinLock nlib_spinlock;
#define NLIB_SPINLOCK_INITIALIZER (0)
static NLIB_ALWAYS_INLINE void nlib_spinlock_init(nlib_spinlock* lock) {
    *lock = 0;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_lock(nlib_spinlock* lock) {
    OSSpinLockLock(lock);
}
static NLIB_ALWAYS_INLINE int nlib_spinlock_trylock(nlib_spinlock* lock) {
    return OSSpinLockTry(lock) ? 0 : EBUSY;
}
static NLIB_ALWAYS_INLINE void nlib_spinlock_unlock(nlib_spinlock* lock) {
    OSSpinLockUnlock(lock);
}
#endif
#endif
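
/* Usage sketch (the macOS-native path above; other platforms get an
 * equivalent nlib_spinlock from Platform.h):
 *
 *     static nlib_spinlock g_spin = NLIB_SPINLOCK_INITIALIZER;
 *
 *     void bump(int* counter) {
 *         nlib_spinlock_lock(&g_spin);
 *         ++*counter;  // keep spinlock critical sections short
 *         nlib_spinlock_unlock(&g_spin);
 *     }
 */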

#ifdef __cplusplus
}
#endif

#if defined(__clang__)
# if __has_feature(cxx_unicode_literals)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __has_feature(cxx_exceptions)
#  if __has_feature(cxx_noexcept)
#   define NLIB_CXX11_NOEXCEPT
#  endif
# else
#  define NLIB_NOEXCEPT
# endif
#else
// __GNUC__
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 4)
#  define NLIB_CXX11_NEW_CHARACTER_TYPES
# endif
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
#  define NLIB_CXX11_NOEXCEPT
# endif
// # if !defined(__CYGWIN__) && (!defined(__GLIBC__) || __GLIBC__ < 2 || __GLIBC_MINOR__ < 1)
// #  error Sorry, glibc is old.
// # endif
#endif

#define NLIB_TIMESPEC_HAS_NATIVE
#define NLIB_IOVEC_HAS_NATIVE

#ifdef PTHREAD_RWLOCK_INITIALIZER
# define NLIB_RWLOCK_HAS_NATIVE
#endif
#ifdef NLIB_RWLOCK_HAS_NATIVE
#ifndef _LIBCPP_VERSION
NLIB_CAPABILITY("mutex")
#endif
typedef pthread_rwlock_t nlib_rwlock;
#define NLIB_RWLOCK_INITIALIZER PTHREAD_RWLOCK_INITIALIZER

#define NLIB_PTHREAD_nlib_rwlock_init
#define NLIB_PTHREAD_nlib_rwlock_destroy
#define NLIB_PTHREAD_nlib_rwlock_tryrdlock
#define NLIB_PTHREAD_nlib_rwlock_trywrlock
#define NLIB_PTHREAD_nlib_rwlock_rdlock
#define NLIB_PTHREAD_nlib_rwlock_rdunlock
#define NLIB_PTHREAD_nlib_rwlock_wrlock
#define NLIB_PTHREAD_nlib_rwlock_wrunlock
#endif
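
/* Usage sketch: nlib_rwlock is pthread_rwlock_t here, so multiple readers
 * may hold the lock concurrently while writers get exclusive access:
 *
 *     static nlib_rwlock g_rw = NLIB_RWLOCK_INITIALIZER;
 *
 *     int read_shared(const int* p) {
 *         int v;
 *         pthread_rwlock_rdlock(&g_rw);
 *         v = *p;
 *         pthread_rwlock_unlock(&g_rw);
 *         return v;
 *     }
 */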

#ifdef PTHREAD_BARRIER_SERIAL_THREAD
# define NLIB_BARRIER_HAS_NATIVE
#endif
#ifdef NLIB_BARRIER_HAS_NATIVE
typedef pthread_barrier_t nlib_barrier;
#define NLIB_PTHREAD_nlib_barrier_init
#define NLIB_PTHREAD_nlib_barrier_destroy
#endif

#define NLIB_THREAD_ATTR_HAS_NATIVE

#ifndef pthread_cleanup_push
# error pthread_cleanup_push must be a macro
#endif

#ifndef pthread_cleanup_pop
# error pthread_cleanup_pop must be a macro
#endif
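
/* pthread_cleanup_push()/pop() may expand to an unbalanced '{' / '}' pair,
 * which is why this header insists they are macros; they must be used as a
 * matched pair in the same scope:
 *
 *     pthread_mutex_lock(&m);
 *     pthread_cleanup_push(unlock_fn, &m);  // unlock_fn runs if canceled
 *     // ... code containing cancellation points ...
 *     pthread_cleanup_pop(1);               // nonzero: run unlock_fn now too
 */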

#ifdef __linux__
// NOTE:
// An implementation based on timerfd_create() and epoll() consumes file
// descriptors, and differences in per-process descriptor limits could hurt
// portability. It also performs no better than our generic version.
// #define NLIB_TIMER_HAS_NATIVE
#endif

#define NLIB_LIBC_nlib_memcmp
#define NLIB_LIBC_nlib_strlen
#define NLIB_LIBC_nlib_strnlen
#if defined(__STDC_LIB_EXT1__)
# define NLIB_LIBC_nlib_wcslen
# define NLIB_LIBC_nlib_wcsnlen
# define NLIB_LIBC_nlib_strncpy
# define NLIB_LIBC_nlib_strcpy
# define NLIB_LIBC_nlib_wcsncpy
# define NLIB_LIBC_nlib_wcscpy
#endif
#define NLIB_LIBC_nlib_strchr
#define NLIB_LIBC_nlib_strrchr

#ifdef __cplusplus
extern "C" {
#endif

#if (defined(__clang__) && defined(NLIB_64BIT)) || \
    (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))

#define NLIB_ATOMIC_RELAXED __ATOMIC_RELAXED
#define NLIB_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
#define NLIB_ATOMIC_RELEASE __ATOMIC_RELEASE
#define NLIB_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
#define NLIB_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST

#if defined(NLIB_DOXYGEN)
int32_t nlib_atomic_load32(const int32_t* ptr, int memorder);
void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val, int memorder);
int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                   int32_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val, int memorder);
int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val, int memorder);

int64_t nlib_atomic_load64(const int64_t* ptr, int memorder);
void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val, int memorder);
int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                   int64_t desired, int weak,
                                   int success_memorder, int failure_memorder);
int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val, int memorder);
int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val, int memorder);

void* nlib_atomic_loadptr(void* const* ptr, int memorder);
void nlib_atomic_storeptr(void** ptr, void* val, int memorder);
void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder);
int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                    void* desired, int weak,
                                    int success_memorder, int failure_memorder);
void nlib_atomic_thread_fence(int memorder);
#endif

NLIB_VIS_PUBLIC extern pthread_mutex_t nlib_tsan_lock;
#if defined(__has_feature) && __has_feature(thread_sanitizer)
#define NLIB_TSAN_LOCK pthread_mutex_lock(&nlib_tsan_lock);
#define NLIB_TSAN_UNLOCK pthread_mutex_unlock(&nlib_tsan_lock);
#else
#define NLIB_TSAN_LOCK
#define NLIB_TSAN_UNLOCK
#endif

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store32(int32_t* ptr, int32_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    int32_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_store64(int64_t* ptr, int64_t val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_exchange_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_add_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_sub_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_and_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_xor_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_or_fetch(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_add(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_sub(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_and(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_xor(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    int64_t rval;
    NLIB_TSAN_LOCK
    rval = __atomic_fetch_or(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval;
    NLIB_TSAN_LOCK
    rval = __atomic_load_n(ptr, memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    NLIB_TSAN_LOCK
    __atomic_store_n(ptr, val, memorder);
    NLIB_TSAN_UNLOCK
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    int rval;
    NLIB_TSAN_LOCK
    rval = __atomic_compare_exchange_n(ptr, expected, desired, weak,
                                       success_memorder, failure_memorder);
    NLIB_TSAN_UNLOCK
    return rval;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    __atomic_thread_fence(memorder);
}

#else
#define NLIB_ATOMIC_RELAXED 0
#define NLIB_ATOMIC_ACQUIRE 1
#define NLIB_ATOMIC_RELEASE 2
#define NLIB_ATOMIC_ACQ_REL 3
#define NLIB_ATOMIC_SEQ_CST 7

static __inline int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);
}

static __inline int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                   int32_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int32_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = *(volatile int64_t*)ptr;
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    __sync_lock_test_and_set(ptr, val);
}

static __inline int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                               int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    return __sync_lock_test_and_set(ptr, val);
}

static __inline int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                   int64_t desired, int weak,
                                                   int success_memorder, int failure_memorder) {
    int64_t old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_add_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_sub_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_and_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_xor_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_or_and_fetch(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_add(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_sub(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_and(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                int memorder) {
    (void)memorder;
    return __sync_fetch_and_xor(ptr, val);
}

static __inline int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                               int memorder) {
    (void)memorder;
    return __sync_fetch_and_or(ptr, val);
}

static __inline void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile *)ptr;  // NOLINT
    (void)memorder;
#if !defined(__i386__) && !defined(__x86_64__)
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
#endif
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        __sync_synchronize();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    void* tmp = __sync_lock_test_and_set(ptr, val);
    (void)tmp;
}

static __inline int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                    void* desired, int weak,
                                                    int success_memorder, int failure_memorder) {
    void* old = __sync_val_compare_and_swap(ptr, *expected, desired);
    if (old == *expected) return 1;
    *expected = old;

    (void)weak;
    (void)success_memorder;
    (void)failure_memorder;
    return 0;
}

static __inline void nlib_atomic_thread_fence(int memorder) {
    switch (memorder) {
    case NLIB_ATOMIC_RELAXED:
        break;
    case NLIB_ATOMIC_ACQUIRE:
        NLIB_MEMORY_ORDER_ACQUIRE;
        break;
    case NLIB_ATOMIC_RELEASE:
        NLIB_MEMORY_ORDER_RELEASE;
        break;
    case NLIB_ATOMIC_ACQ_REL:
        NLIB_MEMORY_ORDER_ACQ_REL;
        break;
    default:
        NLIB_MEMORY_ORDER_SEQ_CST;
        break;
    }
}
#endif
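
/* Usage sketch: a compare-and-swap loop built on the primitives above
 * (illustrative; saturating_inc is not part of nlib). On failure,
 * nlib_atomic_compare_exchange32() stores the observed value back into
 * 'cur', so the loop naturally retries:
 *
 *     static int32_t saturating_inc(int32_t* p, int32_t limit) {
 *         int32_t cur = nlib_atomic_load32(p, NLIB_ATOMIC_ACQUIRE);
 *         while (cur < limit) {
 *             if (nlib_atomic_compare_exchange32(p, &cur, cur + 1, 1,
 *                                                NLIB_ATOMIC_ACQ_REL,
 *                                                NLIB_ATOMIC_ACQUIRE))
 *                 break;  // exchanged successfully
 *         }
 *         return cur;
 *     }
 */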

#ifdef __cplusplus
}
#endif

#endif
#endif  // INCLUDE_NN_NLIB_PLATFORM_UNIX_H_