Platform_cafe.h

/*--------------------------------------------------------------------------------*
  Project:  CrossRoad
  Copyright (C)Nintendo All rights reserved.

  These coded instructions, statements, and computer programs contain proprietary
  information of Nintendo and/or its licensed developers and are protected by
  national and international copyright laws. They may not be disclosed to third
  parties or copied or duplicated in any form, in whole or in part, without the
  prior written consent of Nintendo.

  The content herein is highly confidential and should be handled accordingly.
 *--------------------------------------------------------------------------------*/

#pragma once
#ifndef INCLUDE_NN_NLIB_PLATFORM_CAFE_H_
#define INCLUDE_NN_NLIB_PLATFORM_CAFE_H_
#ifndef INCLUDE_NN_NLIB_PLATFORM_H_
# error do not include directly
#endif
#ifdef CAFE

#ifdef __cplusplus
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS
#endif
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif
#endif

#include <ppc_ps.h>
#include <cafe/os.h>
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef NLIB_CAFE_PPC
# define NLIB_CAFE_PPC
#endif
#define NLIB_ALWAYS_INLINE inline __attribute__((always_inline))
#define NLIB_NEVER_INLINE __attribute__((__noinline__))
#ifdef __cplusplus
#define NLIB_LIKELY(x) __builtin_expect(!!(x), 1)
#define NLIB_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
#define NLIB_LIKELY(x) (x)
#define NLIB_UNLIKELY(x) (x)
#endif
#define NLIB_EXPECT(var, exp_value) __builtin_expect((var), (exp_value))
#define NLIB_CHECK_RESULT __attribute__((warn_unused_result))
#define NLIB_NORETURN __attribute__((noreturn))
#define NLIB_NONNULL __attribute__((nonnull))
#define NLIB_NONNULL_1 __attribute__((nonnull (1)))
#define NLIB_NONNULL_2 __attribute__((nonnull (2)))
#define NLIB_NONNULL_3 __attribute__((nonnull (3)))
#define NLIB_NONNULL_4 __attribute__((nonnull (4)))
#define NLIB_NONNULL_5 __attribute__((nonnull (5)))
#define NLIB_NONNULL_ENABLED
#define NLIB_ATTRIBUTE_MALLOC __attribute__((malloc))
#define NLIB_ATTRIBUTE_PURE __attribute__((pure))
#define NLIB_ATTRIBUTE_CONST __attribute__((const))
#define NLIB_ATTRIBUTE_ALLOC_SIZE1(n)
#define NLIB_ATTRIBUTE_ALLOC_SIZE2(n0, n1)
#define NLIB_ATTRIBUTE_ALLOC_ALIGN(algn)
#define NLIB_ATTRIBUTE_ASSUME_ALIGNED(n)
#ifndef NLIB_DEPRECATED
#define NLIB_DEPRECATED __attribute__((deprecated))
#endif
#ifndef NLIB_DEPRECATED_MSG
#define NLIB_DEPRECATED_MSG(msg) __attribute__((deprecated))
#endif
#define NLIB_VIS_HIDDEN
#define NLIB_VIS_PUBLIC
#define NLIB_WEAKSYMBOL __attribute__((weak))

#define NLIB_MEMORY_ORDER_RELEASE __LWSYNC()
#define NLIB_MEMORY_ORDER_ACQUIRE __ISYNC()
#define NLIB_MEMORY_ORDER_ACQ_REL __LWSYNC(); __ISYNC()
#define NLIB_MEMORY_ORDER_SEQ_CST OSCoherencyBarrier()

// GHS does not support '%zu', and Cafe is a 32-bit environment
#define __PRIS_PREFIX

typedef unsigned int nlib_tls;

typedef OSFastMutex nlib_mutex;
#define NLIB_MUTEX_INITIALIZER {0}
#define NLIB_RECURSIVE_MUTEX_INITIALIZER {0}
#define NLIB_RECURSIVE_TIMED_MUTEX_INITIALIZER {0}
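
/* A minimal usage sketch: NLIB_MUTEX_INITIALIZER allows static initialization of an
 * nlib_mutex, in the spirit of PTHREAD_MUTEX_INITIALIZER.  The lock/unlock calls are
 * assumed to come from the nlib threading API declared elsewhere, not from this file.
 *
 *   static nlib_mutex g_lock = NLIB_MUTEX_INITIALIZER;
 *
 *   void IncrementShared(int* counter) {
 *       nlib_mutex_lock(&g_lock);    // assumed nlib API
 *       ++*counter;
 *       nlib_mutex_unlock(&g_lock);  // assumed nlib API
 *   }
 */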

typedef void* nlib_thread;

typedef OSSemaphore nlib_semaphore;

typedef OSFastCond nlib_cond;
#define NLIB_COND_INITIALIZER {0}

void MyNoreturn_(void) __attribute__((noreturn));
#define NLIB_ASSUME(cond) switch (0) case 0: default: if (cond) ; else MyNoreturn_() /* NOLINT */
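
/* A minimal sketch of NLIB_ASSUME() usage: the macro tells the optimizer that the
 * condition always holds, because the else branch falls into a function declared
 * noreturn.  Violating the assumption is a program error, not a recoverable check.
 *
 *   int DivBy(int value, int divisor) {
 *       NLIB_ASSUME(divisor != 0);
 *       return value / divisor;
 *   }
 */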

#define NLIB_ATOMIC_RELAXED (0)
#define NLIB_ATOMIC_ACQUIRE (1)
#define NLIB_ATOMIC_RELEASE (2)
#define NLIB_ATOMIC_ACQ_REL (3)
#define NLIB_ATOMIC_SEQ_CST (7)

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_load32(const int32_t* ptr, int memorder) {
    int32_t rval = *(volatile int32_t*)ptr;  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store32(int32_t* ptr, int32_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    *(volatile int32_t*)ptr = val;  // NOLINT
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_exchange32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    uint32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSSwapAtomic((volatile OSAtomicVar*)ptr, (uint32_t)val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return (int32_t)x;
}

static __inline void* nlib_atomic_exchangeptr(void** ptr, void* val, int memorder) {
    return (void*)nlib_atomic_exchange32((int32_t*)ptr, (int32_t)val, memorder);  // NOLINT
}

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchange32(int32_t* ptr, int32_t* expected,
                                                             int32_t desired, int weak,
                                                             int success_memorder,
                                                             int failure_memorder) {
    if (success_memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (success_memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    if (weak == 0) {
        BOOL result = OSCompareAndSwapAtomicEx(
            (volatile OSAtomicVar*)ptr,  // NOLINT
            (u32)*expected,  // NOLINT
            (u32)desired,  // NOLINT
            (u32*)expected);  // NOLINT
        if (result) {
            if (success_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
        } else {
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
        }
        return result;
    } else {
        u32 orig_val;
        orig_val = (u32)__LWARX((u32*)ptr, 0);
        if (orig_val == *expected) {
            __DCBST(0, (u32)ptr);
            if (__STWCX((u32*)ptr, 0, (u32)desired)) {
                if (success_memorder & NLIB_ATOMIC_ACQUIRE)
                    NLIB_MEMORY_ORDER_ACQUIRE;
                return 1;
            }
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
            return 0;
        } else {
            *expected = (int32_t)orig_val;
            if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
                NLIB_MEMORY_ORDER_ACQUIRE;
            return 0;
        }
    }
}
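
/* A minimal compare-and-swap loop sketch using nlib_atomic_compare_exchange32(),
 * mirroring the usual __atomic_compare_exchange_n() retry pattern (the function
 * name AtomicMax32 is illustrative only):
 *
 *   static int32_t AtomicMax32(int32_t* target, int32_t candidate) {
 *       int32_t observed = nlib_atomic_load32(target, NLIB_ATOMIC_RELAXED);
 *       while (observed < candidate &&
 *              !nlib_atomic_compare_exchange32(target, &observed, candidate, 1,
 *                                              NLIB_ATOMIC_ACQ_REL,
 *                                              NLIB_ATOMIC_RELAXED)) {
 *           // on failure, observed holds the current value; retry
 *       }
 *       return observed;
 *   }
 */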

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_add_fetch32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x + val;
}
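
/* A minimal reference-count sketch built on the add/sub wrappers above and below
 * (the RefCounted type and function names are illustrative only):
 *
 *   typedef struct { int32_t refcount; } RefCounted;
 *
 *   static void Retain(RefCounted* obj) {
 *       nlib_atomic_add_fetch32(&obj->refcount, 1, NLIB_ATOMIC_RELAXED);
 *   }
 *   static int Release(RefCounted* obj) {  // returns 1 when the last reference is gone
 *       return nlib_atomic_sub_fetch32(&obj->refcount, 1, NLIB_ATOMIC_ACQ_REL) == 0;
 *   }
 */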

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_sub_fetch32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x - val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_and_fetch32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x & val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_xor_fetch32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x ^ val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_or_fetch32(int32_t* ptr, int32_t val,
                                                         int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x | val;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_add32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_sub32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic((volatile OSAtomicVar*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_and32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_xor32(int32_t* ptr, int32_t val,
                                                          int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int32_t nlib_atomic_fetch_or32(int32_t* ptr, int32_t val,
                                                         int memorder) {
    int32_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic((volatile OSAtomicVar*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_load64(const int64_t* ptr, int memorder) {
    int64_t rval = (int64_t)OSGetAtomic64((volatile OSAtomicVar64*)ptr);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_store64(int64_t* ptr, int64_t val,
                                                   int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    OSSetAtomic64((volatile OSAtomicVar64*)ptr, (u64)val);
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_exchange64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    uint64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSSwapAtomic64((volatile OSAtomicVar64*)ptr, (uint64_t)val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return (int64_t)x;
}

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchange64(int64_t* ptr, int64_t* expected,
                                                             int64_t desired, int weak,
                                                             int success_memorder,
                                                             int failure_memorder) {
    BOOL result;
    (void)weak;
    if (success_memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (success_memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;

    result = OSCompareAndSwapAtomicEx64(
        (volatile OSAtomicVar64*)ptr,  // NOLINT
        (u64)*expected,  // NOLINT
        (u64)desired,  // NOLINT
        (u64*)expected);  // NOLINT
    if (result) {
        if (success_memorder & NLIB_ATOMIC_ACQUIRE)
            NLIB_MEMORY_ORDER_ACQUIRE;
    } else {
        if (failure_memorder & NLIB_ATOMIC_ACQUIRE)
            NLIB_MEMORY_ORDER_ACQUIRE;
    }
    return result;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_add_fetch64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x + val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_sub_fetch64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x - val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_and_fetch64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x & val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_xor_fetch64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x ^ val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_or_fetch64(int64_t* ptr, int64_t val,
                                                         int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x | val;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_add64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_sub64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAddAtomic64((volatile OSAtomicVar64*)ptr, -val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_and64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSAndAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_xor64(int64_t* ptr, int64_t val,
                                                          int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSXorAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE int64_t nlib_atomic_fetch_or64(int64_t* ptr, int64_t val,
                                                         int memorder) {
    int64_t x;
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    x = OSOrAtomic64((volatile OSAtomicVar64*)ptr, val);  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return x;
}

static NLIB_ALWAYS_INLINE void* nlib_atomic_loadptr(void* const* ptr, int memorder) {
    void* rval = *(void* volatile*)ptr;  // NOLINT
    if (memorder & NLIB_ATOMIC_ACQUIRE)
        NLIB_MEMORY_ORDER_ACQUIRE;
    return rval;
}

static NLIB_ALWAYS_INLINE void nlib_atomic_storeptr(void** ptr, void* val, int memorder) {
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
    else if (memorder & NLIB_ATOMIC_RELEASE)
        NLIB_MEMORY_ORDER_RELEASE;
    *(void* volatile*)ptr = val;  // NOLINT
    if (memorder == NLIB_ATOMIC_SEQ_CST)
        OSCoherencyBarrier();
}
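
/* A minimal publish/consume sketch: a producer stores a fully initialized object with
 * release semantics, and a consumer loads it with acquire semantics so the object's
 * fields are visible before use (the names below are illustrative only):
 *
 *   static void* g_published;  // accessed only through the atomics below
 *
 *   void Publish(void* obj) {
 *       nlib_atomic_storeptr(&g_published, obj, NLIB_ATOMIC_RELEASE);
 *   }
 *   void* Consume(void) {
 *       return nlib_atomic_loadptr(&g_published, NLIB_ATOMIC_ACQUIRE);
 *   }
 */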

static NLIB_ALWAYS_INLINE int nlib_atomic_compare_exchangeptr(void** ptr, void** expected,
                                                              void* desired, int weak,
                                                              int success_memorder,
                                                              int failure_memorder) {
    return nlib_atomic_compare_exchange32((int32_t*)ptr, (int32_t*)expected, (int32_t)desired,  // NOLINT
                                          weak, success_memorder, failure_memorder);
}

static NLIB_ALWAYS_INLINE void nlib_atomic_thread_fence(int memorder) {
    switch (memorder) {
    case NLIB_ATOMIC_RELAXED:
        break;
    case NLIB_ATOMIC_ACQUIRE:
        NLIB_MEMORY_ORDER_ACQUIRE;
        break;
    case NLIB_ATOMIC_RELEASE:
        NLIB_MEMORY_ORDER_RELEASE;
        break;
    case NLIB_ATOMIC_ACQ_REL:
        NLIB_MEMORY_ORDER_ACQ_REL;
        break;
    default:
        NLIB_MEMORY_ORDER_SEQ_CST;
        break;
    }
}

#define NLIB_FD_O_RDONLY (0x0000)
#define NLIB_FD_O_WRONLY (0x0001)
#define NLIB_FD_O_RDWR   (0x0002)
#define NLIB_FD_O_APPEND (0x0008)
#define NLIB_FD_O_CREAT  (0x0100)
#define NLIB_FD_O_TRUNC  (0x0200)
#define NLIB_FD_O_EXCL   (0x0400)

#ifdef __cplusplus
}
#endif

#define NLIB_NOEXCEPT

// The --restrict option is not specified in the SDK's build configuration
#ifndef __cplusplus
# define __restrict
#endif

#define NLIB_MEMCPY(a, b, c) OSBlockMove((a), (b), (c), FALSE)
#define NLIB_MEMMOVE(a, b, c) OSBlockMove((a), (b), (c), FALSE)
#define NLIB_MEMSET(a, b, c) OSBlockSet((a), (b), (c))

#ifndef NLIB_HAS_ZLIB
# define NLIB_HAS_ZLIB
#endif

#ifndef NLIB_HAS_LIBCURL
# define NLIB_HAS_LIBCURL
#endif

#endif  // CAFE
#endif  // INCLUDE_NN_NLIB_PLATFORM_CAFE_H_