@@ -249,13 +249,34 @@ __CLC_SUBGROUP_COLLECTIVE(FMax, __CLC_MAX, half, -HALF_MAX)
 __CLC_SUBGROUP_COLLECTIVE(FMax, __CLC_MAX, float, -FLT_MAX)
 __CLC_SUBGROUP_COLLECTIVE(FMax, __CLC_MAX, double, -DBL_MAX)

+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, uchar, ~0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, uchar, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, uchar, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, char, ~0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, char, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, char, 0)
+
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, ushort, ~0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, ushort, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, ushort, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, short, ~0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, short, 0)
+__CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, short, 0)
+
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, uint, ~0)
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, uint, 0)
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, uint, 0)
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseAndKHR, __CLC_AND, and, int, ~0)
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseOrKHR, __CLC_OR, or, int, 0)
 __CLC_SUBGROUP_COLLECTIVE_REDUX(BitwiseXorKHR, __CLC_XOR, xor, int, 0)

+__CLC_SUBGROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, ulong, ~0l)
+__CLC_SUBGROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, ulong, 0l)
+__CLC_SUBGROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, ulong, 0l)
+__CLC_SUBGROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, long, ~0l)
+__CLC_SUBGROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, long, 0l)
+__CLC_SUBGROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, long, 0l)
+
 #undef __CLC_SUBGROUP_COLLECTIVE_BODY
 #undef __CLC_SUBGROUP_COLLECTIVE
 #undef __CLC_SUBGROUP_COLLECTIVE_REDUX
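Note: the hunks in this patch only add macro invocations; the expansion itself comes from the __CLC_SUBGROUP_COLLECTIVE_BODY / __CLC_GROUP_COLLECTIVE_BODY macros defined earlier in collectives.cl and is not shown here. As a rough, hypothetical illustration, inferred from the half entry point visible at the end of this section (whose parameter list is `(uint scope, uint op, half x)`), each new line such as `__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, uint, ~0)` is expected to emit a convergent builtin of the following shape, with the last macro argument acting as the operation's identity value:

/* Hypothetical sketch only -- the real body is generated by
 * __CLC_GROUP_COLLECTIVE_BODY; shown just to illustrate the expected
 * signature (group scope, group-operation kind, per-invocation value). */
_CLC_DEF _CLC_CONVERGENT uint __spirv_GroupBitwiseAndKHR(uint scope, uint op,
                                                         uint x);
/* ~0 (all bits set) is the identity for bitwise AND: combining it with any
 * value leaves that value unchanged, so it is safe padding for inactive
 * lanes and a natural seed for an exclusive scan; 0 plays the same role
 * for OR and XOR. */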
@@ -376,13 +397,34 @@ __CLC_GROUP_COLLECTIVE(FMax, __CLC_MAX, half, -HALF_MAX)
 __CLC_GROUP_COLLECTIVE(FMax, __CLC_MAX, float, -FLT_MAX)
 __CLC_GROUP_COLLECTIVE(FMax, __CLC_MAX, double, -DBL_MAX)

+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, uchar, ~0)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, uchar, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, uchar, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, char, ~0)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, char, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, char, 0)
+
+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, ushort, ~0)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, ushort, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, ushort, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, short, ~0)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, short, 0)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, short, 0)
+
 __CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, uint, ~0)
 __CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, uint, 0)
 __CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, uint, 0)
 __CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, int, ~0)
 __CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, int, 0)
 __CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, int, 0)

+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, ulong, ~0l)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, ulong, 0l)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, ulong, 0l)
+__CLC_GROUP_COLLECTIVE(BitwiseAndKHR, __CLC_AND, long, ~0l)
+__CLC_GROUP_COLLECTIVE(BitwiseOrKHR, __CLC_OR, long, 0l)
+__CLC_GROUP_COLLECTIVE(BitwiseXorKHR, __CLC_XOR, long, 0l)
+
 // half requires additional mangled entry points
 _CLC_DEF _CLC_CONVERGENT half _Z17__spirv_GroupFAddjjDF16_(uint scope, uint op,
                                                            half x) {