@@ -1743,6 +1743,247 @@ double __builtin_spirv_OpAtomicFAddEXT_p4f64_i32_i32_f64( volatile __generic dou
1743
1743
}
1744
1744
#endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1745
1745
1746
+ half __builtin_spirv_OpAtomicFMinEXT_p0f16_i32_i32_f16 (volatile private half * Pointer , uint Scope , uint Semantics , half Value )
1747
+ {
1748
+ half orig = * Pointer ;
1749
+ * Pointer = (orig < Value ) ? orig : Value ;
1750
+ return orig ;
1751
+ }
1752
+
1753
/**
 * FMin atomic on global memory for half, emulated in software: the
 * read-compare-store runs under a global spinlock (SPINLOCK_* macros,
 * defined elsewhere in this file) — presumably because there is no native
 * HW f16 min atomic; TODO confirm.  FENCE_PRE_OP/FENCE_POST_OP apply the
 * requested memory-order semantics around the critical section (the final
 * `true` argument selects the global-memory flavor of the fence macros).
 * Returns the value that was at *Pointer before the operation.
 */
half __builtin_spirv_OpAtomicFMinEXT_p1f16_i32_i32_f16( volatile global half *Pointer, uint Scope, uint Semantics, half Value )
{
    half orig;
    FENCE_PRE_OP(Scope, Semantics, true)
    SPINLOCK_START(global)
    orig = *Pointer;
    // NaN note: if orig is NaN the comparison is false and Value is stored.
    *Pointer = (orig < Value) ? orig : Value;
    SPINLOCK_END(global)
    FENCE_POST_OP(Scope, Semantics, true)
    return orig;
}
1764
+
1765
/**
 * FMin atomic on local (workgroup) memory for half, emulated under the
 * local spinlock (SPINLOCK_* macros defined elsewhere in this file).
 * The `false` argument to the fence macros selects the local-memory
 * flavor, mirroring `true` in the global variant above.
 * Returns the value that was at *Pointer before the operation.
 */
half __builtin_spirv_OpAtomicFMinEXT_p3f16_i32_i32_f16( volatile local half *Pointer, uint Scope, uint Semantics, half Value )
{
    half orig;
    FENCE_PRE_OP(Scope, Semantics, false)
    SPINLOCK_START(local)
    orig = *Pointer;
    *Pointer = (orig < Value) ? orig : Value;
    SPINLOCK_END(local)
    FENCE_POST_OP(Scope, Semantics, false)
    return orig;
}
1776
+
1777
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1778
+ half __builtin_spirv_OpAtomicFMinEXT_p4f16_i32_i32_f16 (volatile generic half * Pointer , uint Scope , uint Semantics , half Value )
1779
+ {
1780
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1781
+ {
1782
+ return __builtin_spirv_OpAtomicFMinEXT_p3f16_i32_i32_f16 ((__local half * )Pointer , Scope , Semantics , Value );
1783
+ }
1784
+ else
1785
+ {
1786
+ return __builtin_spirv_OpAtomicFMinEXT_p1f16_i32_i32_f16 ((__global half * )Pointer , Scope , Semantics , Value );
1787
+ }
1788
+ }
1789
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1790
+
1791
+ float __builtin_spirv_OpAtomicFMinEXT_p0f32_i32_i32_f32 (volatile private float * Pointer , uint Scope , uint Semantics , float Value )
1792
+ {
1793
+ float orig = * Pointer ;
1794
+ * Pointer = (orig < Value ) ? orig : Value ;
1795
+ return orig ;
1796
+ }
1797
+
1798
/**
 * FMin atomic on global memory for float: dispatched to the HW atomic
 * intrinsic __builtin_IB_atomic_min_global_f32 through the
 * atomic_operation_1op_as_float macro (defined elsewhere in this file).
 * The macro presumably expands to include the return statement — the
 * function body has no explicit return.  The trailing `true` selects the
 * global-memory fence flavor, as in the other global variants.
 */
float __builtin_spirv_OpAtomicFMinEXT_p1f32_i32_i32_f32( volatile global float *Pointer, uint Scope, uint Semantics, float Value )
{
    atomic_operation_1op_as_float( __builtin_IB_atomic_min_global_f32, float, Pointer, Scope, Semantics, Value, true );
}
1802
+
1803
/**
 * FMin atomic on local (workgroup) memory for float: dispatched to the HW
 * atomic intrinsic __builtin_IB_atomic_min_local_f32 via the
 * atomic_operation_1op_as_float macro; `false` selects the local-memory
 * fence flavor.  The macro presumably supplies the return statement.
 */
float __builtin_spirv_OpAtomicFMinEXT_p3f32_i32_i32_f32( volatile local float *Pointer, uint Scope, uint Semantics, float Value )
{
    atomic_operation_1op_as_float( __builtin_IB_atomic_min_local_f32, float, Pointer, Scope, Semantics, Value, false );
}
1807
+
1808
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1809
+ float __builtin_spirv_OpAtomicFMinEXT_p4f32_i32_i32_f32 (volatile generic float * Pointer , uint Scope , uint Semantics , float Value )
1810
+ {
1811
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1812
+ {
1813
+ return __builtin_spirv_OpAtomicFMinEXT_p3f32_i32_i32_f32 ((__local float * )Pointer , Scope , Semantics , Value );
1814
+ }
1815
+ else
1816
+ {
1817
+ return __builtin_spirv_OpAtomicFMinEXT_p1f32_i32_i32_f32 ((__global float * )Pointer , Scope , Semantics , Value );
1818
+ }
1819
+ }
1820
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1821
+
1822
+ double __builtin_spirv_OpAtomicFMinEXT_p0f64_i32_i32_f64 (volatile private double * Pointer , uint Scope , uint Semantics , double Value )
1823
+ {
1824
+ double orig = * Pointer ;
1825
+ * Pointer = (orig < Value ) ? orig : Value ;
1826
+ return orig ;
1827
+ }
1828
+
1829
/**
 * FMin atomic on global memory for double, emulated under the global
 * spinlock (SPINLOCK_* macros defined elsewhere in this file) —
 * presumably no native HW f64 min atomic; TODO confirm.  Fences apply
 * the requested semantics around the critical section; `true` selects
 * the global-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
double __builtin_spirv_OpAtomicFMinEXT_p1f64_i32_i32_f64( volatile global double *Pointer, uint Scope, uint Semantics, double Value )
{
    double orig;
    FENCE_PRE_OP(Scope, Semantics, true)
    SPINLOCK_START(global)
    orig = *Pointer;
    // NaN note: if orig is NaN the comparison is false and Value is stored.
    *Pointer = (orig < Value) ? orig : Value;
    SPINLOCK_END(global)
    FENCE_POST_OP(Scope, Semantics, true)
    return orig;
}
1840
+
1841
/**
 * FMin atomic on local (workgroup) memory for double, emulated under the
 * local spinlock; `false` selects the local-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
double __builtin_spirv_OpAtomicFMinEXT_p3f64_i32_i32_f64( volatile local double *Pointer, uint Scope, uint Semantics, double Value )
{
    double orig;
    FENCE_PRE_OP(Scope, Semantics, false)
    SPINLOCK_START(local)
    orig = *Pointer;
    *Pointer = (orig < Value) ? orig : Value;
    SPINLOCK_END(local)
    FENCE_POST_OP(Scope, Semantics, false)
    return orig;
}
1852
+
1853
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1854
+ double __builtin_spirv_OpAtomicFMinEXT_p4f64_i32_i32_f64 (volatile generic double * Pointer , uint Scope , uint Semantics , double Value )
1855
+ {
1856
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1857
+ {
1858
+ return __builtin_spirv_OpAtomicFMinEXT_p3f64_i32_i32_f64 ((__local double * )Pointer , Scope , Semantics , Value );
1859
+ }
1860
+ else
1861
+ {
1862
+ return __builtin_spirv_OpAtomicFMinEXT_p1f64_i32_i32_f64 ((__global double * )Pointer , Scope , Semantics , Value );
1863
+ }
1864
+ }
1865
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1866
+
1867
+ half __builtin_spirv_OpAtomicFMaxEXT_p0f16_i32_i32_f16 (volatile private half * Pointer , uint Scope , uint Semantics , half Value )
1868
+ {
1869
+ half orig = * Pointer ;
1870
+ * Pointer = (orig > Value ) ? orig : Value ;
1871
+ return orig ;
1872
+ }
1873
+
1874
/**
 * FMax atomic on global memory for half, emulated under the global
 * spinlock (SPINLOCK_* macros defined elsewhere in this file) —
 * presumably no native HW f16 max atomic; TODO confirm.  `true` selects
 * the global-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
half __builtin_spirv_OpAtomicFMaxEXT_p1f16_i32_i32_f16( volatile global half *Pointer, uint Scope, uint Semantics, half Value )
{
    half orig;
    FENCE_PRE_OP(Scope, Semantics, true)
    SPINLOCK_START(global)
    orig = *Pointer;
    // NaN note: if orig is NaN the comparison is false and Value is stored.
    *Pointer = (orig > Value) ? orig : Value;
    SPINLOCK_END(global)
    FENCE_POST_OP(Scope, Semantics, true)
    return orig;
}
1885
+
1886
/**
 * FMax atomic on local (workgroup) memory for half, emulated under the
 * local spinlock; `false` selects the local-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
half __builtin_spirv_OpAtomicFMaxEXT_p3f16_i32_i32_f16( volatile local half *Pointer, uint Scope, uint Semantics, half Value )
{
    half orig;
    FENCE_PRE_OP(Scope, Semantics, false)
    SPINLOCK_START(local)
    orig = *Pointer;
    *Pointer = (orig > Value) ? orig : Value;
    SPINLOCK_END(local)
    FENCE_POST_OP(Scope, Semantics, false)
    return orig;
}
1897
+
1898
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1899
+ half __builtin_spirv_OpAtomicFMaxEXT_p4f16_i32_i32_f16 (volatile generic half * Pointer , uint Scope , uint Semantics , half Value )
1900
+ {
1901
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1902
+ {
1903
+ return __builtin_spirv_OpAtomicFMaxEXT_p3f16_i32_i32_f16 ((__local half * )Pointer , Scope , Semantics , Value );
1904
+ }
1905
+ else
1906
+ {
1907
+ return __builtin_spirv_OpAtomicFMaxEXT_p1f16_i32_i32_f16 ((__global half * )Pointer , Scope , Semantics , Value );
1908
+ }
1909
+ }
1910
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1911
+
1912
+ float __builtin_spirv_OpAtomicFMaxEXT_p0f32_i32_i32_f32 (volatile private float * Pointer , uint Scope , uint Semantics , float Value )
1913
+ {
1914
+ float orig = * Pointer ;
1915
+ * Pointer = (orig > Value ) ? orig : Value ;
1916
+ return orig ;
1917
+ }
1918
+
1919
/**
 * FMax atomic on global memory for float: dispatched to the HW atomic
 * intrinsic __builtin_IB_atomic_max_global_f32 through the
 * atomic_operation_1op_as_float macro (defined elsewhere in this file).
 * The macro presumably supplies the return statement — the function body
 * has no explicit return.  `true` selects the global-memory fence flavor.
 */
float __builtin_spirv_OpAtomicFMaxEXT_p1f32_i32_i32_f32( volatile global float *Pointer, uint Scope, uint Semantics, float Value )
{
    atomic_operation_1op_as_float( __builtin_IB_atomic_max_global_f32, float, Pointer, Scope, Semantics, Value, true );
}
1923
+
1924
/**
 * FMax atomic on local (workgroup) memory for float: dispatched to the HW
 * atomic intrinsic __builtin_IB_atomic_max_local_f32 via the
 * atomic_operation_1op_as_float macro; `false` selects the local-memory
 * fence flavor.  The macro presumably supplies the return statement.
 */
float __builtin_spirv_OpAtomicFMaxEXT_p3f32_i32_i32_f32( volatile local float *Pointer, uint Scope, uint Semantics, float Value )
{
    atomic_operation_1op_as_float( __builtin_IB_atomic_max_local_f32, float, Pointer, Scope, Semantics, Value, false );
}
1928
+
1929
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1930
+ float __builtin_spirv_OpAtomicFMaxEXT_p4f32_i32_i32_f32 (volatile generic float * Pointer , uint Scope , uint Semantics , float Value )
1931
+ {
1932
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1933
+ {
1934
+ return __builtin_spirv_OpAtomicFMaxEXT_p3f32_i32_i32_f32 ((__local float * )Pointer , Scope , Semantics , Value );
1935
+ }
1936
+ else
1937
+ {
1938
+ return __builtin_spirv_OpAtomicFMaxEXT_p1f32_i32_i32_f32 ((__global float * )Pointer , Scope , Semantics , Value );
1939
+ }
1940
+ }
1941
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1942
+
1943
+ double __builtin_spirv_OpAtomicFMaxEXT_p0f64_i32_i32_f64 (volatile private double * Pointer , uint Scope , uint Semantics , double Value )
1944
+ {
1945
+ double orig = * Pointer ;
1946
+ * Pointer = (orig > Value ) ? orig : Value ;
1947
+ return orig ;
1948
+ }
1949
+
1950
/**
 * FMax atomic on global memory for double, emulated under the global
 * spinlock (SPINLOCK_* macros defined elsewhere in this file) —
 * presumably no native HW f64 max atomic; TODO confirm.  `true` selects
 * the global-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
double __builtin_spirv_OpAtomicFMaxEXT_p1f64_i32_i32_f64( volatile global double *Pointer, uint Scope, uint Semantics, double Value )
{
    double orig;
    FENCE_PRE_OP(Scope, Semantics, true)
    SPINLOCK_START(global)
    orig = *Pointer;
    // NaN note: if orig is NaN the comparison is false and Value is stored.
    *Pointer = (orig > Value) ? orig : Value;
    SPINLOCK_END(global)
    FENCE_POST_OP(Scope, Semantics, true)
    return orig;
}
1961
+
1962
/**
 * FMax atomic on local (workgroup) memory for double, emulated under the
 * local spinlock; `false` selects the local-memory fence flavor.
 * Returns the value that was at *Pointer before the operation.
 */
double __builtin_spirv_OpAtomicFMaxEXT_p3f64_i32_i32_f64( volatile local double *Pointer, uint Scope, uint Semantics, double Value )
{
    double orig;
    FENCE_PRE_OP(Scope, Semantics, false)
    SPINLOCK_START(local)
    orig = *Pointer;
    *Pointer = (orig > Value) ? orig : Value;
    SPINLOCK_END(local)
    FENCE_POST_OP(Scope, Semantics, false)
    return orig;
}
1973
+
1974
+ #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0 )
1975
+ double __builtin_spirv_OpAtomicFMaxEXT_p4f64_i32_i32_f64 (volatile generic double * Pointer , uint Scope , uint Semantics , double Value )
1976
+ {
1977
+ if (__builtin_spirv_OpGenericCastToPtrExplicit_p3i8_p4i8_i32 (__builtin_astype ((Pointer ), __generic void * ), StorageWorkgroup ))
1978
+ {
1979
+ return __builtin_spirv_OpAtomicFMaxEXT_p3f64_i32_i32_f64 ((__local double * )Pointer , Scope , Semantics , Value );
1980
+ }
1981
+ else
1982
+ {
1983
+ return __builtin_spirv_OpAtomicFMaxEXT_p1f64_i32_i32_f64 ((__global double * )Pointer , Scope , Semantics , Value );
1984
+ }
1985
+ }
1986
+ #endif // (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
1746
1987
1747
1988
#undef ATOMIC_FLAG_FALSE
1748
1989
#undef ATOMIC_FLAG_TRUE
0 commit comments