@@ -440,7 +440,7 @@ enum IntAtomicOp
 };
 
 // handle int64 SLM atomic add/sub/xchg/and/or/xor/umax/umin
-ulong __builtin_spirv_OpAtomicUlongBinary_p3( enum IntAtomicOp atomicOp, volatile __local ulong *Pointer,
+ulong OVERLOADABLE __intel_atomic_binary( enum IntAtomicOp atomicOp, volatile __local ulong *Pointer,
                                               uint Scope, uint Semantics, ulong Value )
 {
 
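The rename drops the SPIR-V-style mangled name in favor of a single OVERLOADABLE helper, so the overload attribute lets the compiler pick the right body from the pointer and value types. The function body is elided from this hunk; purely as orientation, a 64-bit SLM read-modify-write helper of this shape could be written as a compare-exchange loop. This is a minimal sketch assuming cl_khr_int64_base_atomics' atom_cmpxchg, not the actual IGC implementation:

ulong OVERLOADABLE __intel_atomic_binary( enum IntAtomicOp atomicOp, volatile __local ulong *Pointer,
                                          uint Scope, uint Semantics, ulong Value )
{
    ulong orig, next;
    do
    {
        orig = *Pointer;                     // snapshot the current value
        switch (atomicOp)                    // compute the replacement
        {
            case ATOMIC_IADD64: next = orig + Value;     break;
            case ATOMIC_SUB64:  next = orig - Value;     break;
            case ATOMIC_XCHG64: next = Value;            break;
            case ATOMIC_AND64:  next = orig & Value;     break;
            case ATOMIC_OR64:   next = orig | Value;     break;
            case ATOMIC_XOR64:  next = orig ^ Value;     break;
            case ATOMIC_UMIN64: next = min(orig, Value); break;
            default:            next = max(orig, Value); break; // ATOMIC_UMAX64
        }
    } while (atom_cmpxchg(Pointer, orig, next) != orig); // retry if another thread intervened
    return orig;                                         // atomics return the old value
}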
@@ -466,7 +466,7 @@ ulong __builtin_spirv_OpAtomicUlongBinary_p3( enum IntAtomicOp atomicOp, volatil
 }
 
 // handle int64 SLM atomic IMin and IMax
-long __builtin_spirv_OpAtomicSlongBinary_p3( enum IntAtomicOp atomicOp, volatile __local long *Pointer,
+long OVERLOADABLE __intel_atomic_binary( enum IntAtomicOp atomicOp, volatile __local long *Pointer,
                                              uint Scope, uint Semantics, long Value )
 {
 
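With the signed variant renamed to the same __intel_atomic_binary symbol, overload resolution on the long pointer and value types selects the IMin/IMax body automatically. A minimal sketch of what such a signed body can look like, again assuming a compare-exchange loop rather than the elided implementation:

long OVERLOADABLE __intel_atomic_binary( enum IntAtomicOp atomicOp, volatile __local long *Pointer,
                                         uint Scope, uint Semantics, long Value )
{
    long orig, next;
    do
    {
        orig = *Pointer;
        // Signed min/max are the only operations routed to this overload.
        next = (atomicOp == ATOMIC_IMIN64) ? min(orig, Value) : max(orig, Value);
    } while (atom_cmpxchg(Pointer, orig, next) != orig);
    return orig;
}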
@@ -486,7 +486,7 @@ long __builtin_spirv_OpAtomicSlongBinary_p3( enum IntAtomicOp atomicOp, volatile
 }
 
 // handle uint64 SLM atomic inc/dec
-ulong __builtin_spirv_OpAtomicUlongUnary_p3( bool isInc, volatile __local long *Pointer, uint Scope, uint Semantics )
+ulong OVERLOADABLE __intel_atomic_unary( bool isInc, volatile __local ulong *Pointer, uint Scope, uint Semantics )
 {
 
     ulong orig;
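The unary helper gets the same OVERLOADABLE treatment, and its pointer parameter is corrected from long * to ulong * so it matches the unsigned inc/dec semantics (the old declaration mixed a ulong return with a long * pointer). The body beyond "ulong orig;" is elided here; a hedged sketch of a compare-exchange formulation, not necessarily the IGC body:

ulong OVERLOADABLE __intel_atomic_unary( bool isInc, volatile __local ulong *Pointer, uint Scope, uint Semantics )
{
    ulong orig;
    do
    {
        orig = *Pointer;   // read, then try to publish orig plus or minus one
    } while (atom_cmpxchg(Pointer, orig, isInc ? orig + 1 : orig - 1) != orig);
    return orig;
}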
@@ -501,7 +501,7 @@ ulong __builtin_spirv_OpAtomicUlongUnary_p3( bool isInc, volatile __local long *
 
 ulong __builtin_spirv_OpAtomicExchange_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_XCHG64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_XCHG64, Pointer, Scope, Semantics, Value);
 }
 
 
@@ -842,7 +842,7 @@ ulong __builtin_spirv_OpAtomicIIncrement_p1i64_i32_i32( volatile __global ulong
 
 ulong __builtin_spirv_OpAtomicIIncrement_p3i64_i32_i32( volatile __local ulong *Pointer, uint Scope, uint Semantics )
 {
-    return __builtin_spirv_OpAtomicUlongUnary_p3(true, Pointer, Scope, Semantics);
+    return __intel_atomic_unary(true, Pointer, Scope, Semantics);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
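For context, these _p3i64 entry points are what 64-bit atomics on __local memory lower to. A hypothetical kernel that would end up in __builtin_spirv_OpAtomicIIncrement_p3i64_i32_i32, assuming the device exposes cl_khr_int64_base_atomics:

#pragma OPENCL EXTENSION cl_khr_int64_base_atomics : enable

__kernel void count_local( __global ulong *out )
{
    __local ulong counter;
    if (get_local_id(0) == 0)
        counter = 0;                       // initialize once per work-group
    barrier(CLK_LOCAL_MEM_FENCE);

    atom_inc(&counter);                    // lowers to OpAtomicIIncrement on a p3 i64 pointer

    barrier(CLK_LOCAL_MEM_FENCE);
    if (get_local_id(0) == 0)
        out[get_group_id(0)] = counter;    // each group writes its final count
}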
@@ -915,7 +915,7 @@ ulong __builtin_spirv_OpAtomicIDecrement_p1i64_i32_i32( volatile __global ulong
 
 ulong __builtin_spirv_OpAtomicIDecrement_p3i64_i32_i32( volatile __local ulong *Pointer, uint Scope, uint Semantics )
 {
-    return __builtin_spirv_OpAtomicUlongUnary_p3(false, Pointer, Scope, Semantics);
+    return __intel_atomic_unary(false, Pointer, Scope, Semantics);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -988,7 +988,7 @@ ulong __builtin_spirv_OpAtomicIAdd_p1i64_i32_i32_i64( volatile __global ulong *P
 
 ulong __builtin_spirv_OpAtomicIAdd_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_IADD64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_IADD64, Pointer, Scope, Semantics, Value);
 }
 
 
@@ -1063,7 +1063,7 @@ ulong __builtin_spirv_OpAtomicISub_p1i64_i32_i32_i64( volatile __global ulong *P
 
 ulong __builtin_spirv_OpAtomicISub_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_SUB64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_SUB64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1137,7 +1137,7 @@ long __builtin_spirv_OpAtomicSMin_p1i64_i32_i32_i64( volatile __global ulong *Po
 
 long __builtin_spirv_OpAtomicSMin_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, long Value )
 {
-    return __builtin_spirv_OpAtomicSlongBinary_p3(ATOMIC_IMIN64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_IMIN64, (volatile __local long *)Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
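The explicit (volatile __local long *) cast is what steers overload resolution to the signed helper now that both helpers share one name; the wrapper's own parameter stays ulong * to match the SPIR-V mangling. The cast is not cosmetic: signed and unsigned 64-bit min disagree whenever the sign bit is set, and the same applies to the SMax hunk below. A hypothetical check illustrating the difference:

// Hypothetical helper showing why SMin must reach the long* overload:
bool smin_needs_signed_overload( void )
{
    long  a  = -1;                 // bit pattern 0xFFFFFFFFFFFFFFFF
    ulong ua = (ulong)a;           // same bits, reinterpreted as unsigned
    // min(a, 1L) == -1, but min(ua, 1UL) == 1: the ulong overload would
    // pick the wrong winner whenever the sign bit is set.
    return min(a, 1L) != (long)min(ua, 1UL);   // evaluates to true
}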
@@ -1209,7 +1209,7 @@ ulong __builtin_spirv_OpAtomicUMin_p1i64_i32_i32_i64( volatile __global ulong *P
 
 ulong __builtin_spirv_OpAtomicUMin_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_UMIN64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_UMIN64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1282,7 +1282,7 @@ long __builtin_spirv_OpAtomicSMax_p1i64_i32_i32_i64( volatile __global ulong *Po
 
 long __builtin_spirv_OpAtomicSMax_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, long Value )
 {
-    return __builtin_spirv_OpAtomicSlongBinary_p3(ATOMIC_IMAX64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_IMAX64, (volatile __local long *)Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1357,7 +1357,7 @@ ulong __builtin_spirv_OpAtomicUMax_p1i64_i32_i32_i64( volatile __global ulong *P
 
 ulong __builtin_spirv_OpAtomicUMax_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_UMAX64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_UMAX64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1430,7 +1430,7 @@ ulong __builtin_spirv_OpAtomicAnd_p1i64_i32_i32_i64( volatile __global ulong *Po
 
 ulong __builtin_spirv_OpAtomicAnd_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_AND64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_AND64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1503,7 +1503,7 @@ ulong __builtin_spirv_OpAtomicOr_p1i64_i32_i32_i64( volatile __global ulong *Poi
 
 ulong __builtin_spirv_OpAtomicOr_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_OR64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_OR64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)
@@ -1577,7 +1577,7 @@ ulong __builtin_spirv_OpAtomicXor_p1i64_i32_i32_i64( volatile __global ulong *Po
 
 ulong __builtin_spirv_OpAtomicXor_p3i64_i32_i32_i64( volatile __local ulong *Pointer, uint Scope, uint Semantics, ulong Value )
 {
-    return __builtin_spirv_OpAtomicUlongBinary_p3(ATOMIC_XOR64, Pointer, Scope, Semantics, Value);
+    return __intel_atomic_binary(ATOMIC_XOR64, Pointer, Scope, Semantics, Value);
 }
 
 #if (__OPENCL_C_VERSION__ >= CL_VERSION_2_0)