@@ -72,62 +72,63 @@ _CLC_OVERLOAD _CLC_DECL void __spirv_MemoryBarrier(unsigned int, unsigned int);
} \
}
-#define __CLC_NVVM_ATOMIC_IMPL(TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, \
-                               OP, NAME_MANGLED, ADDR_SPACE, \
-                               ADDR_SPACE_MANGLED, ADDR_SPACE_NV) \
-  _CLC_DECL TYPE \
-      NAME_MANGLED##PU3##ADDR_SPACE_MANGLED##TYPE_MANGLED##N5__spv5Scope4FlagENS1_19MemorySemanticsMask4FlagE##TYPE_MANGLED( \
-          volatile ADDR_SPACE TYPE *pointer, enum Scope scope, \
-          enum MemorySemanticsMask semantics, TYPE value) { \
-    /* Semantics mask may include memory order, storage class and other info \
-       Memory order is stored in the lowest 5 bits */ \
-    unsigned int order = semantics & 0x1F; \
-    switch (order) { \
-    case None: \
-      __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                                   ADDR_SPACE, ADDR_SPACE_NV, ) \
-      break; \
-    case Acquire: \
-      if (__clc_nvvm_reflect_arch() >= 700) { \
-        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                                     ADDR_SPACE, ADDR_SPACE_NV, _acquire) \
-      } else { \
-        __CLC_NVVM_ATOMIC_IMPL_ACQUIRE_FENCE(TYPE, TYPE_NV, TYPE_MANGLED_NV, \
-                                             OP, ADDR_SPACE, ADDR_SPACE_NV) \
-      } \
-      break; \
-    case Release: \
-      if (__clc_nvvm_reflect_arch() >= 700) { \
-        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                                     ADDR_SPACE, ADDR_SPACE_NV, _release) \
-      } else { \
-        __spirv_MemoryBarrier(scope, Release); \
-        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                                     ADDR_SPACE, ADDR_SPACE_NV, ) \
-      } \
-      break; \
-    case AcquireRelease: \
-      if (__clc_nvvm_reflect_arch() >= 700) { \
-        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                                     ADDR_SPACE, ADDR_SPACE_NV, _acq_rel) \
-      } else { \
-        __spirv_MemoryBarrier(scope, Release); \
-        __CLC_NVVM_ATOMIC_IMPL_ACQUIRE_FENCE(TYPE, TYPE_NV, TYPE_MANGLED_NV, \
-                                             OP, ADDR_SPACE, ADDR_SPACE_NV) \
-      } \
-      break; \
-    } \
-    __builtin_trap(); \
-    __builtin_unreachable(); \
+#define __CLC_NVVM_ATOMIC_IMPL(FN_MANGLED, TYPE, TYPE_MANGLED, TYPE_NV, \
+                               TYPE_MANGLED_NV, OP, ADDR_SPACE, ADDR_SPACE_NV) \
+  __attribute__((always_inline)) _CLC_DECL TYPE FN_MANGLED( \
+      volatile ADDR_SPACE TYPE *pointer, enum Scope scope, \
+      enum MemorySemanticsMask semantics, TYPE value) { \
+    /* Semantics mask may include memory order, storage class and other info \
+       Memory order is stored in the lowest 5 bits */ \
+    unsigned int order = semantics & 0x1F; \
+    switch (order) { \
+    case None: \
+      __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                                   ADDR_SPACE, ADDR_SPACE_NV, ) \
+      break; \
+    case Acquire: \
+      if (__clc_nvvm_reflect_arch() >= 700) { \
+        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                                     ADDR_SPACE, ADDR_SPACE_NV, _acquire) \
+      } else { \
+        __CLC_NVVM_ATOMIC_IMPL_ACQUIRE_FENCE(TYPE, TYPE_NV, TYPE_MANGLED_NV, \
+                                             OP, ADDR_SPACE, ADDR_SPACE_NV) \
+      } \
+      break; \
+    case Release: \
+      if (__clc_nvvm_reflect_arch() >= 700) { \
+        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                                     ADDR_SPACE, ADDR_SPACE_NV, _release) \
+      } else { \
+        __spirv_MemoryBarrier(scope, Release); \
+        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                                     ADDR_SPACE, ADDR_SPACE_NV, ) \
+      } \
+      break; \
+    case AcquireRelease: \
+      if (__clc_nvvm_reflect_arch() >= 700) { \
+        __CLC_NVVM_ATOMIC_IMPL_ORDER(TYPE, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                                     ADDR_SPACE, ADDR_SPACE_NV, _acq_rel) \
+      } else { \
+        __spirv_MemoryBarrier(scope, Release); \
+        __CLC_NVVM_ATOMIC_IMPL_ACQUIRE_FENCE(TYPE, TYPE_NV, TYPE_MANGLED_NV, \
+                                             OP, ADDR_SPACE, ADDR_SPACE_NV) \
+      } \
+      break; \
+    } \
+    __builtin_trap(); \
+    __builtin_unreachable(); \
}
-#define __CLC_NVVM_ATOMIC(TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                          NAME_MANGLED) \
-  __attribute__((always_inline)) \
-  __CLC_NVVM_ATOMIC_IMPL(TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                         NAME_MANGLED, __global, AS1, _global_) \
-  __attribute__((always_inline)) \
-  __CLC_NVVM_ATOMIC_IMPL(TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, \
-                         NAME_MANGLED, __local, AS3, _shared_)
+#define __CLC_NVVM_ATOMIC(TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, \
+                          NAME_MANGLED) \
+  __CLC_NVVM_ATOMIC_IMPL( \
+      NAME_MANGLED##P##TYPE_MANGLED##N5__spv5Scope4FlagENS0_19MemorySemanticsMask4FlagE##TYPE_MANGLED, \
+      TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, , _gen_) \
+  __CLC_NVVM_ATOMIC_IMPL( \
+      NAME_MANGLED##PU3AS1##TYPE_MANGLED##N5__spv5Scope4FlagENS1_19MemorySemanticsMask4FlagE##TYPE_MANGLED, \
+      TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, __global, _global_) \
+  __CLC_NVVM_ATOMIC_IMPL( \
+      NAME_MANGLED##PU3AS3##TYPE_MANGLED##N5__spv5Scope4FlagENS1_19MemorySemanticsMask4FlagE##TYPE_MANGLED, \
+      TYPE, TYPE_MANGLED, TYPE_NV, TYPE_MANGLED_NV, OP, __local, _shared_)
#endif
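
For illustration only, here is a sketch of how a caller might instantiate the reworked macro. The arguments used below (int, i, add, _Z18__spirv_AtomicIAdd) are assumptions chosen for the example; the real instantiations presumably live in the atomic_* implementation files, which are not part of this hunk.

/* Hypothetical instantiation, not part of this patch: */
__CLC_NVVM_ATOMIC(int, i, int, i, add, _Z18__spirv_AtomicIAdd)

/* With the reworked __CLC_NVVM_ATOMIC, this one line defines three entry
   points, one per address space, each receiving its full mangled name
   through the new FN_MANGLED parameter:
     _Z18__spirv_AtomicIAddPiN5__spv5Scope4FlagENS0_19MemorySemanticsMask4FlagEi      (generic, _gen_)
     _Z18__spirv_AtomicIAddPU3AS1iN5__spv5Scope4FlagENS1_19MemorySemanticsMask4FlagEi (__global, _global_)
     _Z18__spirv_AtomicIAddPU3AS3iN5__spv5Scope4FlagENS1_19MemorySemanticsMask4FlagEi (__local, _shared_)
   Each has the signature:
     int f(volatile <address-space> int *pointer, enum Scope scope,
           enum MemorySemanticsMask semantics, int value);
*/

Note that the old macro only emitted the __global and __local overloads; passing the mangled name in as FN_MANGLED also lets the new macro add a generic-address-space (_gen_) overload, whose ADDR_SPACE argument is left empty.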