nlib
Platform_cafe.h

/*--------------------------------------------------------------------------------*
  Project: CrossRoad
  Copyright (C)Nintendo All rights reserved.

  These coded instructions, statements, and computer programs contain proprietary
  information of Nintendo and/or its licensed developers and are protected by
  national and international copyright laws. They may not be disclosed to third
  parties or copied or duplicated in any form, in whole or in part, without the
  prior written consent of Nintendo.

  The content herein is highly confidential and should be handled accordingly.
 *--------------------------------------------------------------------------------*/

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_CAFE_H_
#define INCLUDE_NN_NLIB_PLATFORM_CAFE_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif
#ifdef CAFE

#ifdef __cplusplus
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#endif

#include <ppc_ps.h>
#include <cafe/os.h>
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_CAFE_PPC
# define NLIB_CAFE_PPC
#endif
#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#ifdef __cplusplus
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
#define NLIB_LIKELY(x) (x)
#define NLIB_UNLIKELY(x) (x)
#endif
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_FALLTHROUGH
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))
#define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
#define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif
#define NLIB_VIS_HIDDEN
#define NLIB_VIS_PUBLIC
#define NLIB_WEAKSYMBOL __attribute__((weak))

#define NLIB_MEMORY_ORDER_RELEASE __LWSYNC()
#define NLIB_MEMORY_ORDER_ACQUIRE __ISYNC()
#define NLIB_MEMORY_ORDER_ACQ_REL __LWSYNC(); __ISYNC()
#define NLIB_MEMORY_ORDER_SEQ_CST OSCoherencyBarrier()
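/* Note on the fence macros above: this follows the usual PowerPC mapping, where
 * lwsync (__LWSYNC) provides the release-side barrier, isync (__ISYNC) completes
 * the acquire side, and OSCoherencyBarrier() is what this header uses for the
 * sequentially consistent case. */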

// GHS does not support '%zu', and Cafe is a 32-bit environment
#define __PRIS_PREFIX

typedef unsigned int nlib_tls;

typedef OSFastMutex nlib_mutex;
#define NLIB_MUTEX_INITIALIZER {0}
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {0}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {0}

typedef void* nlib_thread;

typedef OSSemaphore nlib_semaphore;

typedef OSFastCond nlib_cond;
#define NLIB_COND_INITIALIZER {0}
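/* Usage sketch (illustrative only; the variable names are made up, not taken from
 * this header): the *_INITIALIZER macros above are meant for statically allocated
 * synchronization objects, e.g.
 *
 *     static nlib_mutex g_lock = NLIB_MUTEX_INITIALIZER;
 *     static nlib_cond g_ready = NLIB_COND_INITIALIZER;
 */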

void MyNoreturn_(void) __attribute__((noreturn));
#define NLIB_ASSUME(cond) switch (0) case 0: default: if (cond) ; else MyNoreturn_() /* NOLINT */
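/* How NLIB_ASSUME(cond) works: the else branch of the expansion calls MyNoreturn_(),
 * which is declared noreturn, so the compiler may treat cond as always true and
 * optimize accordingly. The switch (0) case 0: default: wrapper only exists so the
 * macro expands to a single well-formed statement, e.g.
 *
 *     NLIB_ASSUME(idx < 16);
 */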

#define NLIB_ATOMIC_RELAXED (0)
#define NLIB_ATOMIC_ACQUIRE (1)
#define NLIB_ATOMIC_RELEASE (2)
#define NLIB_ATOMIC_ACQ_REL (3)
#define NLIB_ATOMIC_SEQ_CST (7)
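/* These constants are bit flags: NLIB_ATOMIC_ACQ_REL (3) is NLIB_ATOMIC_ACQUIRE |
 * NLIB_ATOMIC_RELEASE, and NLIB_ATOMIC_SEQ_CST (7) sets both of those bits as well,
 * so the (memorder & NLIB_ATOMIC_ACQUIRE) and (memorder & NLIB_ATOMIC_RELEASE)
 * tests in the helpers below also trigger for acq_rel and seq_cst orderings. */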

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
        int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    *(volatile int32_t*)ptr = val;  // NOLINT
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}
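/* The helpers above and below share one barrier pattern: a release barrier is issued
 * before the memory access when the RELEASE bit is set, an acquire barrier after it
 * when the ACQUIRE bit is set, and NLIB_ATOMIC_SEQ_CST additionally surrounds the
 * plain store helpers with OSCoherencyBarrier() on both sides. */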

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
        int memorder) {
    uint32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSSwapAtomic((volatile OSAtomicVar*)ptr, (uint32_t)val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return (int32_t)x;
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    return (void*)nlib_atomic_exchange32((int32_t*)ptr, (int32_t)val, memorder);  // NOLINT
}

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
        int32_t desired, int weak,
        int success_memorder,
        int failure_memorder) {
    if (success_memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (success_memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    if (weak == 0) {
        BOOL result = OSCompareAndSwapAtomicEx(
            (volatile OSAtomicVar*)ptr,  // NOLINT
            (u32)*expected,  // NOLINT
            (u32)desired,  // NOLINT
            (u32*)expected);  // NOLINT
        if (result) {
            if (success_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
        } else {
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
        }
        return result;
    } else {
        u32 orig_val;
        orig_val = (u32)__LWARX((u32*)ptr, 0);
        if (orig_val == *expected) {
            __DCBST(0, (u32)ptr);
            if (__STWCX((u32*)ptr, 0, (u32)desired)) {
                if (success_memorder & NLIB_ATOMIC_ACQUIRE)
                    NLIB_MEMORY_ORDER_ACQUIRE;
                return 1;
            }
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
            return 0;
        } else {
            *expected = (int32_t)orig_val;
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
            return 0;
        }
    }
}
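/* Illustrative CAS loop (the names counter and delta are made up, not part of this
 * header): atomically add delta to *counter with the compare-exchange helper above.
 * On a failed exchange the helper leaves *expected holding the value it observed,
 * so the loop simply retries with that snapshot.
 *
 *     int32_t expected = nlib_atomic_load32(counter, NLIB_ATOMIC_RELAXED);
 *     while (!nlib_atomic_compare_exchange32(counter, &expected, expected + delta, 1,
 *                                            NLIB_ATOMIC_ACQ_REL, NLIB_ATOMIC_RELAXED)) {
 *         // retry with the refreshed value of expected
 *     }
 */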

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x + val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x - val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x & val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x ^ val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x | val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
        int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = (int64_t)OSGetAtomic64((volatile OSAtomicVar64*)ptr);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
        int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    OSSetAtomic64((volatile OSAtomicVar64*)ptr, (u64)val);
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
        int memorder) {
    uint64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSSwapAtomic64((volatile OSAtomicVar64*)ptr, (uint64_t)val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return (int64_t)x;
}

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
        int64_t desired, int weak,
        int success_memorder,
        int failure_memorder) {
    BOOL result;
    (void)weak;
    if (success_memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (success_memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;

    result = OSCompareAndSwapAtomicEx64(
        (volatile OSAtomicVar64*)ptr,  // NOLINT
        (u64)*expected,  // NOLINT
        (u64)desired,  // NOLINT
        (u64*)expected);  // NOLINT
    if (result) {
        if (success_memorder & NLIB_ATOMIC_ACQUIRE)
            NLIB_MEMORY_ORDER_ACQUIRE;
    } else {
        if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
            NLIB_MEMORY_ORDER_ACQUIRE;
    }
    return result;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x + val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x - val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x & val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x ^ val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x | val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
        int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile*)ptr;  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    *(void* volatile*)ptr = val;  // NOLINT
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
        void* desired, int weak,
        int success_memorder,
        int failure_memorder) {
    return nlib_atomic_compare_exchange32((int32_t*)ptr, (int32_t*)expected, (int32_t)desired,  // NOLINT
                                          weak, success_memorder, failure_memorder);
}

static NLIB_ALWAYS_INLINE void nlib_atomic_thread_fence(int memorder) {
    switch (memorder) {
        case NLIB_ATOMIC_RELAXED:
            break;
        case NLIB_ATOMIC_ACQUIRE:
            NLIB_MEMORY_ORDER_ACQUIRE;
            break;
        case NLIB_ATOMIC_RELEASE:
            NLIB_MEMORY_ORDER_RELEASE;
            break;
        case NLIB_ATOMIC_ACQ_REL:
            NLIB_MEMORY_ORDER_ACQ_REL;
            break;
        default:
            NLIB_MEMORY_ORDER_SEQ_CST;
            break;
    }
}

#define NLIB_FD_O_RDONLY (0x0000)
#define NLIB_FD_O_WRONLY (0x0001)
#define NLIB_FD_O_RDWR (0x0002)
#define NLIB_FD_O_APPEND (0x0008)
#define NLIB_FD_O_CREAT (0x0100)
#define NLIB_FD_O_TRUNC (0x0200)
#define NLIB_FD_O_EXCL (0x0400)

#ifdef __cplusplus
}
#endif

#define NLIB_NOEXCEPT

// --restrict is not specified in SDK's configuration
#ifndef __cplusplus
# define __restrict
#endif

#define NLIB_MEMCPY(a, b, c) OSBlockMove((a), (b), (c), FALSE)
#define NLIB_MEMMOVE(a, b, c) OSBlockMove((a), (b), (c), FALSE)
#define NLIB_MEMSET(a, b, c) OSBlockSet((a), (b), (c))

#ifndef NLIB_HAS_ZLIB
# define NLIB_HAS_ZLIB
#endif

#ifndef NLIB_HAS_LIBCURL
# define NLIB_HAS_LIBCURL
#endif

#endif
#endif  // INCLUDE_NN_NLIB_PLATFORM_CAFE_H_