#include "kmp_config.h"

#define KMP_FTN_PLAIN   1
#define KMP_FTN_APPEND  2
#define KMP_FTN_UPPER   3

#define KMP_PTR_SKIP    (sizeof(void *))

/* -------------------------- Compiler variations ------------------------- */

#define KMP_MEM_CONS_VOLATILE  0
#define KMP_MEM_CONS_FENCE     1

#ifndef KMP_MEM_CONS_MODEL
# define KMP_MEM_CONS_MODEL  KMP_MEM_CONS_VOLATILE
#endif

#define KMP_COMPILER_ICC   0
#define KMP_COMPILER_GCC   0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC  0

#if defined( __INTEL_COMPILER )
# undef KMP_COMPILER_ICC
# define KMP_COMPILER_ICC 1
#elif defined( __clang__ )
# undef KMP_COMPILER_CLANG
# define KMP_COMPILER_CLANG 1
#elif defined( __GNUC__ )
# undef KMP_COMPILER_GCC
# define KMP_COMPILER_GCC 1
#elif defined( _MSC_VER )
# undef KMP_COMPILER_MSVC
# define KMP_COMPILER_MSVC 1
#else
# error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK && !KMP_ARCH_PPC64
# define KMP_AFFINITY_SUPPORTED 1
# if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#  define KMP_GROUP_AFFINITY 1
# else
#  define KMP_GROUP_AFFINITY 0
# endif
#else
# define KMP_AFFINITY_SUPPORTED 0
# define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
# if KMP_COMPILER_ICC
   /* _Quad is already defined for icc. */
#  undef  KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_CLANG
   /* Clang does not support a software-emulated 128-bit extended type yet. */
   typedef long double _Quad;
# elif KMP_COMPILER_GCC
   typedef __float128 _Quad;
#  undef  KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_MSVC
   typedef long double _Quad;
# endif
#else
# if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
   typedef long double _Quad;
#  undef  KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#if KMP_OS_WINDOWS
  typedef char              kmp_int8;
  typedef unsigned char     kmp_uint8;
  typedef short             kmp_int16;
  typedef unsigned short    kmp_uint16;
  typedef int               kmp_int32;
  typedef unsigned int      kmp_uint32;
# define KMP_INT32_SPEC   "d"
# define KMP_UINT32_SPEC  "u"
# ifndef KMP_STRUCT64
   typedef __int64           kmp_int64;
   typedef unsigned __int64  kmp_uint64;
#  define KMP_INT64_SPEC   "I64d"
#  define KMP_UINT64_SPEC  "I64u"
# else
   struct kmp_struct64 {
       kmp_int32 a, b;
   };
   typedef struct kmp_struct64 kmp_int64;
   typedef struct kmp_struct64 kmp_uint64;
   /* Not sure what to use for KMP_[U]INT64_SPEC here. */
# endif
# if KMP_ARCH_X86_64
#  define KMP_INTPTR 1
   typedef __int64           kmp_intptr_t;
   typedef unsigned __int64  kmp_uintptr_t;
#  define KMP_INTPTR_SPEC   "I64d"
#  define KMP_UINTPTR_SPEC  "I64u"
# endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
  typedef char               kmp_int8;
  typedef unsigned char      kmp_uint8;
  typedef short              kmp_int16;
  typedef unsigned short     kmp_uint16;
  typedef int                kmp_int32;
  typedef unsigned int       kmp_uint32;
  typedef long long          kmp_int64;
  typedef unsigned long long kmp_uint64;
# define KMP_INT32_SPEC   "d"
# define KMP_UINT32_SPEC  "u"
# define KMP_INT64_SPEC   "lld"
# define KMP_UINT64_SPEC  "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
# define KMP_SIZE_T_SPEC  KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
# define KMP_SIZE_T_SPEC  KMP_UINT64_SPEC
#else
# error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
# define KMP_SIZE_T_MAX  (0xFFFFFFFF)
#else
# define KMP_SIZE_T_MAX  (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float  kmp_real32;
typedef double kmp_real64;
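
/* Usage sketch (illustrative addition, not from the original header): the
   *_SPEC macros carry only the conversion letters, so they are spliced into
   printf-style format strings after a literal "%":

       kmp_int64 iv = 42;
       printf( "iv = %" KMP_INT64_SPEC ", size_t max = %" KMP_SIZE_T_SPEC "\n",
               iv, (size_t)KMP_SIZE_T_MAX );
*/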
#ifndef KMP_INTPTR
# define KMP_INTPTR 1
  typedef long           kmp_intptr_t;
  typedef unsigned long  kmp_uintptr_t;
# define KMP_INTPTR_SPEC   "ld"
# define KMP_UINTPTR_SPEC  "lu"
#endif

#ifdef KMP_I8
  typedef kmp_int64   kmp_int;
  typedef kmp_uint64  kmp_uint;
#else
  typedef kmp_int32   kmp_int;
  typedef kmp_uint32  kmp_uint;
#endif /* KMP_I8 */
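
/* KMP_I8, when defined, selects the 64-bit flavors above as the runtime's
   default kmp_int/kmp_uint (used e.g. when the default Fortran integer size
   is 8 bytes); without it the default integer type is 32-bit. */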
#define KMP_INT_MAX  ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN  ((kmp_int32)0x80000000)

#ifdef __cplusplus
// Template for debug prints specification ( d, u, lld, llu ) and to obtain
// signed/unsigned flavors of a type.
template< typename T >
struct traits_t {
    typedef T           signed_t;
    typedef T           unsigned_t;
    typedef T           floating_t;
    static char const * spec;
};
// int
template<>
struct traits_t< signed int > {
    typedef signed int    signed_t;
    typedef unsigned int  unsigned_t;
    typedef double        floating_t;
    static char const *   spec;
};
// unsigned int
template<>
struct traits_t< unsigned int > {
    typedef signed int    signed_t;
    typedef unsigned int  unsigned_t;
    typedef double        floating_t;
    static char const *   spec;
};
// long long
template<>
struct traits_t< signed long long > {
    typedef signed long long    signed_t;
    typedef unsigned long long  unsigned_t;
    typedef long double         floating_t;
    static char const *         spec;
};
// unsigned long long
template<>
struct traits_t< unsigned long long > {
    typedef signed long long    signed_t;
    typedef unsigned long long  unsigned_t;
    typedef long double         floating_t;
    static char const *         spec;
};
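
// Usage sketch (illustrative, not from the original header): traits_t lets
// generic code recover the signed/unsigned companion of a type plus a printf
// conversion string for it; the spec strings ("d", "u", "lld", "llu") are
// presumably defined in one of the runtime's source files.
//
//     template< typename T >
//     void debug_print( T v ) {
//         char fmt[16];
//         snprintf( fmt, sizeof(fmt), "%%%s\n", traits_t< T >::spec );
//         printf( fmt, v );          // fmt becomes e.g. "%lld\n"
//     }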
#endif // __cplusplus

#define KMP_EXPORT  extern  /* export declaration in guide libraries */

#if __GNUC__ >= 4
# define __forceinline __inline
#endif

#define PAGE_SIZE            (0x4000)
#define PAGE_ALIGNED(_addr)  ( ! ((size_t) _addr & \
                               (size_t)(PAGE_SIZE - 1)))
#define ALIGN_TO_PAGE(x)     (void *)(((size_t)(x)) & ~((size_t)(PAGE_SIZE - 1)))

/* --------- Support for cache alignment, padding, locking, etc. ---------- */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096  /* for multi-node systems */

/* Define the default size of the cache line. */
#ifndef CACHE_LINE
# define CACHE_LINE 128  /* cache line size in bytes */
#else
# if ( CACHE_LINE < 64 ) && ! defined( KMP_OS_DARWIN )
   /* This produces too many warnings on OS X; disabled there. */
#  warning CACHE_LINE is too small.
# endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR)  /* nothing */

#if KMP_OS_UNIX && defined(__GNUC__)
# define KMP_DO_ALIGN(bytes)  __attribute__((aligned(bytes)))
# define KMP_ALIGN_CACHE      __attribute__((aligned(CACHE_LINE)))
# define KMP_ALIGN_CACHE_INTERNODE  __attribute__((aligned(INTERNODE_CACHE_LINE)))
# define KMP_ALIGN(bytes)     __attribute__((aligned(bytes)))
#else
# define KMP_DO_ALIGN(bytes)  __declspec( align(bytes) )
# define KMP_ALIGN_CACHE      __declspec( align(CACHE_LINE) )
# define KMP_ALIGN_CACHE_INTERNODE  __declspec( align(INTERNODE_CACHE_LINE) )
# define KMP_ALIGN(bytes)     __declspec( align(bytes) )
#endif

/* General purpose fence types for memory operations. */
enum kmp_mem_fence_type {
    kmp_no_fence,       /* No memory fence */
    kmp_acquire_fence,  /* Acquire (read) memory fence */
    kmp_release_fence,  /* Release (write) memory fence */
    kmp_full_fence      /* Full (read+write) memory fence */
};
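
/* Usage sketch (illustrative, not from the original header): KMP_ALIGN_CACHE
   expands to the compiler-specific alignment attribute selected above, e.g.
   to keep a hot counter from sharing a cache line with its neighbors:

       struct KMP_ALIGN_CACHE padded_counter {
           volatile kmp_int32 value;
       };
*/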
#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

/* Using InterlockedIncrement / InterlockedDecrement causes a library loading
   ordering problem, so we use InterlockedExchangeAdd instead. */
# define KMP_TEST_THEN_INC32(p)       InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p)   InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_ADD4_32(p)     InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p)  InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_DEC32(p)       InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)   InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_ADD32(p, v)    InterlockedExchangeAdd( (volatile long *)(p), (v) )

extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );

# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)  InterlockedCompareExchange( (volatile long *)(p), (long)(sv), (long)(cv) )

# define KMP_XCHG_FIXED32(p, v)  InterlockedExchange( (volatile long *)(p), (long)(v) )
# define KMP_XCHG_FIXED64(p, v)  InterlockedExchange64( (volatile kmp_int64 *)(p), (kmp_int64)(v) )

inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = InterlockedExchange( (volatile long *)p, *(long *)&v );
    return *(kmp_real32*)&tmp;
}
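
/* The exchange above swaps raw bit patterns: the float is reinterpreted as a
   long so InterlockedExchange can operate on it, and the previous 32-bit
   pattern is re-typed back to kmp_real32; no numeric conversion occurs. */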
extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8  __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8  __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8   __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16  __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32  __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64  __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_ADD8(p, v)       __kmp_test_then_add8( (p), (v) )
# define KMP_TEST_THEN_OR8(p, v)        __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v)       __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_INC64(p)         __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)     __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_64(p)       __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p)    __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC64(p)         __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)     __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD64(p, v)      __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR32(p, v)       __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v)      __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v)       __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v)      __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )

# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)   __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)   __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)   __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)  __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)  __kmp_compare_and_store_ret64( (p), (cv), (sv) )

# define KMP_XCHG_FIXED8(p, v)   __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v)  __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_REAL64(p, v)   __kmp_xchg_real64( (p), (v) );

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* Cast p to the correct type so that the proper intrinsic will be used. */
# define KMP_TEST_THEN_ADD8(p, v)      __sync_fetch_and_add( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_INC32(p)        __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_OR8(p, v)       __sync_fetch_and_or( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_AND8(p, v)      __sync_fetch_and_and( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p)    __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC64(p)        __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)    __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)      __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p)   __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)      __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p)   __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_DEC32(p)        __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)    __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC64(p)        __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)    __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD32(p, v)     __sync_fetch_and_add( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)     __sync_fetch_and_add( (kmp_int64 *)(p), (v) )

# define KMP_TEST_THEN_OR32(p, v)      __sync_fetch_and_or( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_AND32(p, v)     __sync_fetch_and_and( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_OR64(p, v)      __sync_fetch_and_or( (kmp_int64 *)(p), (v) )
# define KMP_TEST_THEN_AND64(p, v)     __sync_fetch_and_and( (kmp_int64 *)(p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)   __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)   __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)    __sync_bool_compare_and_swap( (volatile void **)(p), (void *)(cv), (void *)(sv) )

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)   __sync_val_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )

#define KMP_XCHG_FIXED8(p, v)   __sync_lock_test_and_set( (volatile kmp_uint8 *)(p), (kmp_uint8)(v) )
#define KMP_XCHG_FIXED16(p, v)  __sync_lock_test_and_set( (volatile kmp_uint16 *)(p), (kmp_uint16)(v) )
#define KMP_XCHG_FIXED32(p, v)  __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
#define KMP_XCHG_FIXED64(p, v)  __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )

extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
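
/* Note: the __sync_* builtins above act as full memory barriers, with the
   documented exception of __sync_lock_test_and_set, which is only an
   acquire barrier. */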
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v );
    return *(kmp_real32*)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64( volatile kmp_real64 *p, kmp_real64 v )
{
    kmp_int64 tmp = __sync_lock_test_and_set( (kmp_int64*)p, *(kmp_int64*)&v );
    return *(kmp_real64*)&tmp;
}
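
/* Usage sketch (illustrative, not from the original header): the
   KMP_COMPARE_AND_STORE_* macros return nonzero iff the store happened, so a
   typical atomic-update loop reads the old value, computes the new one, and
   retries on failure:

       kmp_int32 old_val, new_val;
       do {
           old_val = *p;
           new_val = old_val + 1;
       } while ( ! KMP_COMPARE_AND_STORE_ACQ32( p, old_val, new_val ) );
*/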
#else

extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8  __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8  __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8   __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16  __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32  __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64  __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );

# define KMP_TEST_THEN_ADD8(p, v)      __kmp_test_then_add8( (p), (v) )
# define KMP_TEST_THEN_INC32(p)        __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_OR8(p, v)       __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v)      __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p)    __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p)        __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)    __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)      __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p)   __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)      __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p)   __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC32(p)        __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)    __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC64(p)        __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)    __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD32(p, v)     __kmp_test_then_add32( (p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)     __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR32(p, v)      __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v)     __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v)      __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v)     __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )

# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)   __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)   __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)   __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)  __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)  __kmp_compare_and_store_ret32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)  __kmp_compare_and_store_ret64( (p), (cv), (sv) )

# define KMP_XCHG_FIXED8(p, v)   __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v)  __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_FIXED32(p, v)  __kmp_xchg_fixed32( (p), (v) );
# define KMP_XCHG_FIXED64(p, v)  __kmp_xchg_fixed64( (p), (v) );
# define KMP_XCHG_REAL32(p, v)   __kmp_xchg_real32( (p), (v) );
# define KMP_XCHG_REAL64(p, v)   __kmp_xchg_real64( (p), (v) );

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------- */

#if KMP_OS_WINDOWS
# ifdef __ABSOFT_WIN
#   define KMP_MB()   asm ("nop")
#   define KMP_IMB()  asm ("nop")
# else
#   define KMP_MB()   /* _asm{ nop } */
#   define KMP_IMB()  /* _asm{ nop } */
# endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || KMP_ARCH_MIPS64
# define KMP_MB()  __sync_synchronize()
#endif

#ifndef KMP_MB
# define KMP_MB()   /* nothing to do */
#endif

#ifndef KMP_IMB
# define KMP_IMB()  /* nothing to do */
#endif

#ifndef KMP_ST_REL32
# define KMP_ST_REL32(A,D)  ( *(A) = (D) )
#endif

#ifndef KMP_ST_REL64
# define KMP_ST_REL64(A,D)  ( *(A) = (D) )
#endif

#ifndef KMP_LD_ACQ32
# define KMP_LD_ACQ32(A)    ( *(A) )
#endif

#ifndef KMP_LD_ACQ64
# define KMP_LD_ACQ64(A)    ( *(A) )
#endif

/* "Thread-consistent" accessors: TCR_n/TCW_n read and write n-byte scalars,
   TCI_n/TCD_n increment and decrement them, the SYNC variants cover values
   that may be concurrently updated, and the PTR variants pick the 4- or
   8-byte flavor to match the pointer size. */
#define TCR_1(a)            (a)
#define TCW_1(a,b)          (a) = (b)
#define TCR_4(a)            (a)
#define TCW_4(a,b)          (a) = (b)
#define TCI_4(a)            (++(a))
#define TCD_4(a)            (--(a))
#define TCR_8(a)            (a)
#define TCW_8(a,b)          (a) = (b)
#define TCI_8(a)            (++(a))
#define TCD_8(a)            (--(a))
#define TCR_SYNC_4(a)       (a)
#define TCW_SYNC_4(a,b)     (a) = (b)
#define TCX_SYNC_4(a,b,c)   KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a)       (a)
#define TCW_SYNC_8(a,b)     (a) = (b)
#define TCX_SYNC_8(a,b,c)   KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
/* 32-bit pointers */
#define TCR_PTR(a)          ((void *)TCR_4(a))
#define TCW_PTR(a,b)        TCW_4((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_4((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_4((a),(b),(c)))
#else /* 64-bit pointers */
#define TCR_PTR(a)          ((void *)TCR_8(a))
#define TCW_PTR(a,b)        TCW_8((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_8((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_8((a),(b),(c)))
#endif /* KMP_ARCH_X86 */

#ifndef FTN_TRUE
# define FTN_TRUE   TRUE
#endif

#ifndef FTN_FALSE
# define FTN_FALSE  FALSE
#endif

typedef void (*microtask_t)( int *gtid, int *npr, ... );
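
/* Usage sketch (illustrative, not from the original header): a microtask is
   the outlined body of a parallel region that the runtime invokes once per
   thread; gtid points to the global thread id, and the trailing varargs carry
   pointers to the region's shared variables:

       void my_microtask( int *gtid, int *npr, ... ) {
           // read *gtid, then va_start/va_arg over the shared-variable pointers
       }
       microtask_t fn = (microtask_t)&my_microtask;
*/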
#ifdef USE_VOLATILE_CAST
# define VOLATILE_CAST(x)  (volatile x)
#else
# define VOLATILE_CAST(x)  (x)
#endif

#define KMP_WAIT_YIELD      __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR  __kmp_wait_yield_4_ptr
#define KMP_EQ              __kmp_eq_4
#define KMP_NEQ             __kmp_neq_4
#define KMP_LT              __kmp_lt_4
#define KMP_GE              __kmp_ge_4
#define KMP_LE              __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array. */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
# define STATIC_EFI2_WORKAROUND
#else
# define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#define KMP_USE_BGET 1

// Switches for OSS builds
#ifndef USE_SYSFS_INFO
# define USE_SYSFS_INFO  0
#endif
#ifndef USE_CMPXCHG_FIX
# define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#if OMP_41_ENABLED
# define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable TSX if dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
# define KMP_USE_TSX  (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
# ifdef KMP_USE_ADAPTIVE_LOCKS
#  undef KMP_USE_ADAPTIVE_LOCKS
# endif
# define KMP_USE_ADAPTIVE_LOCKS  KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
# define KMP_HAVE_TICK_TIME  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
    kmp_warnings_off = 0,       /* No warnings */
    kmp_warnings_low,           /* Minimal warnings (default) */
    kmp_warnings_explicit = 6,  /* Explicitly set to ON - more warnings */
    kmp_warnings_verbose        /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

#include "kmp_safe_c_api.h"