#if !defined(__TBB_machine_H) || defined(__TBB_machine_gcc_power_H)
#error Do not #include this internal file directly; use public TBB headers instead.
#endif

#define __TBB_machine_gcc_power_H

#include <stdint.h>
#include <unistd.h>
#if __powerpc64__ || __ppc64__
    #define __TBB_WORDSIZE 8
#else
    #define __TBB_WORDSIZE 4
#endif
#if __BIG_ENDIAN__ || (defined(__BYTE_ORDER__) && __BYTE_ORDER__==__ORDER_BIG_ENDIAN__)
    #define __TBB_ENDIANNESS __TBB_ENDIAN_BIG
#elif __LITTLE_ENDIAN__ || (defined(__BYTE_ORDER__) && __BYTE_ORDER__==__ORDER_LITTLE_ENDIAN__)
    #define __TBB_ENDIANNESS __TBB_ENDIAN_LITTLE
#elif defined(__BYTE_ORDER__)
    #define __TBB_ENDIANNESS __TBB_ENDIAN_UNSUPPORTED
#else
    #define __TBB_ENDIANNESS __TBB_ENDIAN_DETECT
#endif
// On Power Architecture, lock-free 64-bit atomics require 64-bit hardware.
#if __TBB_WORDSIZE==8
    #define __TBB_64BIT_ATOMICS 1
#elif __bgp__
    #define __TBB_64BIT_ATOMICS 0
#else
    // May be predefined as 1 (e.g. on the compiler command line) for 32-bit builds
    // that are guaranteed to run only on 64-bit hardware.
    #ifndef __TBB_64BIT_ATOMICS
    #define __TBB_64BIT_ATOMICS 0
    #endif
#endif
inline int32_t __TBB_machine_cmpswp4 (volatile void *ptr, int32_t value, int32_t comparand ) {
    int32_t result;
    __asm__ __volatile__("sync\n"
                         "0:\n\t"
                         "lwarx %[res],0,%[ptr]\n\t"     /* load word with reservation */
                         "cmpw %[res],%[cmp]\n\t"        /* compare against comparand */
                         "bne- 1f\n\t"                   /* exit if not equal */
                         "stwcx. %[val],0,%[ptr]\n\t"    /* conditionally store new value */
                         "bne- 0b\n"                     /* retry if reservation was lost */
                         "1:\n\t" "isync"
                         : [res]"=&r"(result), "+m"(* (int32_t*) ptr)
                         : [ptr]"r"(ptr), [val]"r"(value), [cmp]"r"(comparand)
                         : "memory", "cr0" );
    return result;
}
#if __TBB_WORDSIZE==8

inline int64_t __TBB_machine_cmpswp8 (volatile void *ptr, int64_t value, int64_t comparand ) {
    int64_t result;
    __asm__ __volatile__("sync\n"
                         "0:\n\t"
                         "ldarx %[res],0,%[ptr]\n\t"     /* load doubleword with reservation */
                         "cmpd %[res],%[cmp]\n\t"        /* compare against comparand */
                         "bne- 1f\n\t"                   /* exit if not equal */
                         "stdcx. %[val],0,%[ptr]\n\t"    /* conditionally store new value */
                         "bne- 0b\n"                     /* retry if reservation was lost */
                         "1:\n\t" "isync"
                         : [res]"=&r"(result), "+m"(* (int64_t*) ptr)
                         : [ptr]"r"(ptr), [val]"r"(value), [cmp]"r"(comparand)
                         : "memory", "cr0" );
    return result;
}
#elif __TBB_64BIT_ATOMICS /* && __TBB_WORDSIZE==4 */

inline int64_t __TBB_machine_cmpswp8 (volatile void *ptr, int64_t value, int64_t comparand ) {
    int64_t result;
    int64_t value_register, comparand_register, result_register; // dummy variables to allocate registers
    __asm__ __volatile__("sync\n\t"
                         "ld %[val],%[valm]\n\t"         /* 64-bit operands are passed through memory, */
                         "ld %[cmp],%[cmpm]\n"           /* since the 32-bit ABI cannot pass them in single registers */
                         "0:\n\t"
                         "ldarx %[res],0,%[ptr]\n\t"     /* load doubleword with reservation */
                         "cmpd %[res],%[cmp]\n\t"        /* compare against comparand */
                         "bne- 1f\n\t"                   /* exit if not equal */
                         "stdcx. %[val],0,%[ptr]\n\t"    /* conditionally store new value */
                         "bne- 0b\n"                     /* retry if reservation was lost */
                         "1:\n\t"
                         "std %[res],%[resm]\n\t"        /* return the result through memory */
                         "isync"
                         : [resm]"=m"(result)
                         , [res]"=&r"(result_register)
                         , [val]"=&r"(value_register)
                         , [cmp]"=&r"(comparand_register)
                         , "+m"(* (int64_t*) ptr)
                         : [ptr]"r"(ptr)
                         , [valm]"m"(value)
                         , [cmpm]"m"(comparand)
                         : "memory", "cr0" );
    return result;
}

#endif /* __TBB_WORDSIZE==4 && __TBB_64BIT_ATOMICS */
#define __TBB_MACHINE_DEFINE_LOAD_STORE(S,ldx,stx,cmpx)                              \
    template <typename T>                                                            \
    struct machine_load_store<T,S> {                                                 \
        static inline T load_with_acquire(const volatile T& location) {              \
            T result;                                                                \
            __asm__ __volatile__(ldx " %[res],0(%[ptr])\n"                           \
                                 "0:\n\t"                                            \
                                 cmpx " %[res],%[res]\n\t"                           \
                                 "bne- 0b\n\t"                                       \
                                 "isync"                                             \
                                 : [res]"=r"(result)                                 \
                                 : [ptr]"b"(&location) /* cannot use register 0 */   \
                                 , "m"(location)                                     \
                                 : "memory"            /* compiler acquire fence */  \
                                 , "cr0"               /* clobbered by cmpw/cmpd */);\
            return result;                                                           \
        }                                                                            \
        static inline void store_with_release(volatile T &location, T value) {       \
            __asm__ __volatile__("lwsync\n\t"                                        \
                                 stx " %[val],0(%[ptr])"                             \
                                 : "=m"(location)                                    \
                                 : [ptr]"b"(&location) /* cannot use register 0 */   \
                                 , [val]"r"(value)                                   \
                                 : "memory" /* compiler release fence */);           \
        }                                                                            \
    };                                                                               \
                                                                                     \
    template <typename T>                                                            \
    struct machine_load_store_relaxed<T,S> {                                         \
        static inline T load (const __TBB_atomic T& location) {                      \
            T result;                                                                \
            __asm__ __volatile__(ldx " %[res],0(%[ptr])"                             \
                                 : [res]"=r"(result)                                 \
                                 : [ptr]"b"(&location) /* cannot use register 0 */   \
                                 , "m"(location) );    /* no compiler fence */       \
            return result;                                                           \
        }                                                                            \
        static inline void store (__TBB_atomic T &location, T value) {               \
            __asm__ __volatile__(stx " %[val],0(%[ptr])"                             \
                                 : "=m"(location)                                    \
                                 : [ptr]"b"(&location) /* cannot use register 0 */   \
                                 , [val]"r"(value) );  /* no compiler fence */       \
        }                                                                            \
    };

namespace tbb {
namespace internal {
    __TBB_MACHINE_DEFINE_LOAD_STORE(1,"lbz","stb","cmpw")
    __TBB_MACHINE_DEFINE_LOAD_STORE(2,"lhz","sth","cmpw")
    __TBB_MACHINE_DEFINE_LOAD_STORE(4,"lwz","stw","cmpw")
#if __TBB_WORDSIZE==8

    __TBB_MACHINE_DEFINE_LOAD_STORE(8,"ld" ,"std","cmpd")

#elif __TBB_64BIT_ATOMICS /* && __TBB_WORDSIZE==4 */

    template <typename T>
    struct machine_load_store<T,8> {
        static inline T load_with_acquire(const volatile T& location) {
            T result;
            T result_register; // dummy variable to allocate a register
            __asm__ __volatile__("ld %[res],0(%[ptr])\n\t"
                                 "std %[res],%[resm]\n"
                                 "0:\n\t"
                                 "cmpd %[res],%[res]\n\t"   /* always equal: creates the dependency */
                                 "bne- 0b\n\t"              /* never taken */
                                 "isync"
                                 : [resm]"=m"(result)
                                 , [res]"=&r"(result_register)
                                 : [ptr]"b"(&location)      /* cannot use register 0 here */
                                 , "m"(location)
                                 : "memory", "cr0" );
            return result;
        }
        static inline void store_with_release(volatile T &location, T value) {
            T value_register; // dummy variable to allocate a register
            __asm__ __volatile__("lwsync\n\t"
                                 "ld %[val],%[valm]\n\t"
                                 "std %[val],0(%[ptr])"
                                 : "=m"(location)
                                 , [val]"=&r"(value_register)
                                 : [ptr]"b"(&location)      /* cannot use register 0 here */
                                 , [valm]"m"(value)
                                 : "memory" );              /* compiler release fence */
        }
    };
    template <typename T>
    struct machine_load_store_relaxed<T,8> {
        static inline T load (const volatile T& location) {
            T result;
            T result_register; // dummy variable to allocate a register
            __asm__ __volatile__("ld %[res],0(%[ptr])\n\t"
                                 "std %[res],%[resm]"
                                 : [resm]"=m"(result)
                                 , [res]"=&r"(result_register)
                                 : [ptr]"b"(&location)      /* cannot use register 0 here */
                                 , "m"(location) );         /* no compiler fence */
            return result;
        }
        static inline void store (volatile T &location, T value) {
            T value_register; // dummy variable to allocate a register
            __asm__ __volatile__("ld %[val],%[valm]\n\t"
                                 "std %[val],0(%[ptr])"
                                 : "=m"(location)
                                 , [val]"=&r"(value_register)
                                 : [ptr]"b"(&location)      /* cannot use register 0 here */
                                 , [valm]"m"(value) );      /* no compiler fence */
        }
    };
    #define __TBB_machine_load_store_relaxed_8

#endif /* __TBB_WORDSIZE==4 && __TBB_64BIT_ATOMICS */

}} // namespaces internal, tbb

#undef __TBB_MACHINE_DEFINE_LOAD_STORE
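/* Illustrative sketch (not part of the original header): tbb_machine.h routes
   __TBB_load_with_acquire and __TBB_store_with_release to the machine_load_store
   specializations generated above. A hypothetical direct use on a 4-byte location
   (the variable names below are made up) would be:

       int32_t status = 0;
       // producer side: lwsync, then a plain store
       tbb::internal::machine_load_store<int32_t,4>::store_with_release(status, 1);
       // consumer side: load, dependent compare/branch, then isync
       int32_t observed = tbb::internal::machine_load_store<int32_t,4>::load_with_acquire(status);
*/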
#define __TBB_USE_GENERIC_PART_WORD_CAS                     1
#define __TBB_USE_GENERIC_FETCH_ADD                         1
#define __TBB_USE_GENERIC_FETCH_STORE                       1
#define __TBB_USE_GENERIC_SEQUENTIAL_CONSISTENCY_LOAD_STORE 1
#define __TBB_control_consistency_helper() __asm__ __volatile__("isync": : :"memory")
#define __TBB_full_memory_fence()          __asm__ __volatile__( "sync": : :"memory")
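/* Illustrative sketch (not part of the original header): __TBB_full_memory_fence() is a full
   barrier (sync), while __TBB_control_consistency_helper() is meant to follow a conditional
   branch that depends on a previously loaded flag, so the isync turns the control dependency
   into acquire-like ordering. A hypothetical spin-wait on a plain volatile flag would be:

       extern volatile int ready;
       while( !ready ) ;                     // spin until the producer sets the flag
       __TBB_control_consistency_helper();   // order subsequent loads after the flag check
*/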
static inline intptr_t __TBB_machine_lg( uintptr_t x ) {
    __TBB_ASSERT(x, "__TBB_Log2(0) undefined");
    // cntlzd/cntlzw counts leading zero bits and does not affect cr0
#if __TBB_WORDSIZE==8
    __asm__ __volatile__ ("cntlzd %0,%0" : "+r"(x));
    return 63-static_cast<intptr_t>(x);
#else
    __asm__ __volatile__ ("cntlzw %0,%0" : "+r"(x));
    return 31-static_cast<intptr_t>(x);
#endif
}
#define __TBB_Log2(V) __TBB_machine_lg(V)
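/* Illustrative example (not part of the original header): cntlzd counts leading zero bits,
   so for x == 40 (binary 101000) on a 64-bit build it yields 58 leading zeros, and
   __TBB_machine_lg(40) returns 63-58 == 5, i.e. the index of the most significant set bit,
   which is floor(log2(x)). */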
// Assumes implicit alignment for any 32-bit value
typedef uint32_t __TBB_Flag;
#define __TBB_Flag __TBB_Flag

inline bool __TBB_machine_trylockbyte( __TBB_atomic __TBB_Flag &flag ) {
    return __TBB_machine_cmpswp4(&flag,1,0)==0;
}
#define __TBB_TryLockByte(P) __TBB_machine_trylockbyte(P)
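/* Illustrative sketch (not part of the original header): __TBB_TryLockByte is the primitive
   behind TBB's spin locks; it returns true only when the 0 -> 1 transition succeeds. A minimal
   lock/unlock pair built on it, with made-up names and assuming __TBB_store_with_release from
   tbb_machine.h, would be:

       static inline void example_lock( __TBB_atomic __TBB_Flag &flag ) {
           while( !__TBB_TryLockByte(flag) )
               ;   // spin; real code would add a backoff here (cf. tbb::internal::atomic_backoff)
       }
       static inline void example_unlock( __TBB_atomic __TBB_Flag &flag ) {
           __TBB_store_with_release(flag, 0);   // release store: lwsync before the store
       }
*/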