/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <atomic>
#include <stdarg.h>
#include <stdlib.h>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD)
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) && \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
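
// Illustrative sketch (not part of the upstream header; "walker" and "entry"
// are hypothetical names): on ABIs where va_list is an array type (e.g.
// x86-64 SysV), the runtime passes a pointer and dereferences it, so one
// varargs walker works under either definition:
//
//   void walker(kmp_va_list ap) { int x = va_arg(kmp_va_deref(ap), int); }
//   void entry(int n, ...) {
//     va_list ap;
//     va_start(ap, n);
//     walker(kmp_va_addr_of(ap)); // &ap on SysV x86-64, ap by value elsewhere
//     va_end(ap);
//   }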

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
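
// Usage sketch (illustrative; "__dump_value" is a hypothetical helper, and
// the spec strings themselves are defined in a .cpp file elsewhere in the
// runtime): templated debug code can build a printf format for any supported
// integer type T, e.g.
//
//   template <typename T> void __dump_value(T v) {
//     char fmt[16];
//     KMP_SNPRINTF(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec);
//     printf(fmt, v); // prints with "d", "u", "lld" or "llu" as appropriate
//   }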
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

/* Check if the OS/arch can support user-level mwait */
// All mwait code tests for UMWAIT first, so it should only fall back to ring3
// MWAIT for KNL.
#define KMP_HAVE_MWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC)

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
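
// Worked example (illustrative), assuming KMP_GET_PAGE_SIZE() returns 4096:
//   PAGE_ALIGNED((void *)0x2000)  -> 1, since 0x2000 & 0xFFF == 0
//   ALIGN_TO_PAGE((void *)0x2345) -> (void *)0x2000, the low bits are cleared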

/* ---------- Support for cache alignment, padding, etc. ---------------- */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
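
// Usage sketch (illustrative; do_first/do_second are placeholders): the macro
// marks an intentional fall through so -Wimplicit-fallthrough style warnings
// stay quiet no matter which definition above was selected:
//
//   switch (kind) {
//   case 1:
//     do_first();
//     KMP_FALLTHROUGH();
//   case 2:
//     do_second();
//     break;
//   }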

#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, \
                                 default_ver) \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name)))); \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR( \
      apic_name) "@@" default_ver "\n\t"); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")

#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
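
// Expansion sketch (illustrative; the API name and version strings here are
// hypothetical): a use such as
//   KMP_VERSION_SYMBOL(api_foo, 10, "VERSION_1.0");
// declares __kmp_api_api_foo_10_alias as an alias of __kmp_api_api_foo and
// emits two .symver directives: api_foo@VERSION_1.0 bound to the alias, and
// api_foo@@VERSION bound to the default implementation.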

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
// #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
// #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
// #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
// #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
// #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
//   __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))

// KMP_COMPARE_AND_STORE expects this order:       pointer, compare, exchange
// _InterlockedCompareExchange expects this order: pointer, exchange, compare
// KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
// write is successful if the return value of _InterlockedCompareExchange is
// the same as the compare value.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}

// The _RET versions return the value instead of a bool
// #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
//   _InterlockedCompareExchange8((p), (sv), (cv))
// #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
//   _InterlockedCompareExchange16((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
                                (kmp_int64)(cv))

// #define KMP_XCHG_FIXED8(p, v) \
//   _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
// #define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
// #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

// inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
//   kmp_int64 tmp = _InterlockedExchange64((volatile kmp_int64 *)p,
//                                          *(kmp_int64 *)&v);
//   return *(kmp_real64 *)&tmp;
// }

#else // !(KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG)

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#endif

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* Cast p to the correct type so that the proper intrinsic is used. */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v) \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
// Returns the observed value (updated into cv on failure), matching the
// semantics of __sync_val_compare_and_swap.
static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                        kmp_uint64 cv,
                                                        kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#endif

#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE() \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) { \
    __kmp_query_cpuid(&__kmp_cpuinfo); \
  } \
  if (__kmp_cpuinfo.sse2) { \
    KMP_MFENCE_(); \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a, b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a, b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */
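
// Usage sketch (illustrative; "flag" is a placeholder variable): TCR_*/TCW_*
// mark every racy read or write of a shared scalar, so all such accesses can
// be found (and, per the FIXME above, retyped) in one place:
//   kmp_int32 flag;
//   TCW_4(flag, 1);          // publish
//   while (!TCR_4(flag)) {}  // poll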

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Safe C API
#include "kmp_safe_c_api.h"

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
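
// Example (illustrative): the macros expand to plain std::atomic member calls
// with an explicit memory order, e.g. for a kmp_int32 counter:
//   std::atomic<kmp_int32> count;
//   KMP_ATOMIC_ST_RLX(&count, 0); // count.store(0, std::memory_order_relaxed)
//   KMP_ATOMIC_INC(&count);       // count.fetch_add(1, std::memory_order_acq_rel)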

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
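
// Example (illustrative; "owner" and my_gtid are placeholders): because
// "expected" is taken by value, a failed exchange does not report the
// observed value back, so this behaves as a simple one-shot CAS:
//   std::atomic<kmp_int32> owner{0};
//   if (__kmp_atomic_compare_store(&owner, 0, my_gtid)) {
//     /* lock acquired */
//   }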

// Symbol lookup on Linux/Windows
#if KMP_OS_WINDOWS
extern void *__kmp_lookup_symbol(const char *name);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif

#endif /* KMP_OS_H */