 #include "utils_math.h"
 #include "utils_sanitizers.h"
 
+// Temporary solution for disabling memory poisoning. This is needed because
+// AddressSanitizer does not support memory poisoning for GPU allocations.
+// More info: https://github.com/oneapi-src/unified-memory-framework/issues/634
+#ifndef POISON_MEMORY
+#define POISON_MEMORY 0
+#endif
+
+static inline void annotate_memory_inaccessible([[maybe_unused]] void *ptr,
+                                                [[maybe_unused]] size_t size) {
+#if POISON_MEMORY
+    utils_annotate_memory_inaccessible(ptr, size);
+#endif
+}
+
+static inline void annotate_memory_undefined([[maybe_unused]] void *ptr,
+                                             [[maybe_unused]] size_t size) {
+#if POISON_MEMORY
+    utils_annotate_memory_undefined(ptr, size);
+#endif
+}
+
 typedef struct umf_disjoint_pool_shared_limits_t {
     size_t MaxSize;
     std::atomic<size_t> TotalSize;
@@ -400,7 +421,7 @@ static void *memoryProviderAlloc(umf_memory_provider_handle_t hProvider,
     if (ret != UMF_RESULT_SUCCESS) {
         throw MemoryProviderError{ret};
     }
-    utils_annotate_memory_inaccessible(ptr, size);
+    annotate_memory_inaccessible(ptr, size);
     return ptr;
 }
 
@@ -822,7 +843,7 @@ void *DisjointPool::AllocImpl::allocate(size_t Size, bool &FromPool) try {
     FromPool = false;
     if (Size > getParams().MaxPoolableSize) {
         Ptr = memoryProviderAlloc(getMemHandle(), Size);
-        utils_annotate_memory_undefined(Ptr, Size);
+        annotate_memory_undefined(Ptr, Size);
         return Ptr;
     }
 
@@ -839,7 +860,7 @@ void *DisjointPool::AllocImpl::allocate(size_t Size, bool &FromPool) try {
     }
 
     VALGRIND_DO_MEMPOOL_ALLOC(this, Ptr, Size);
-    utils_annotate_memory_undefined(Ptr, Bucket.getSize());
+    annotate_memory_undefined(Ptr, Bucket.getSize());
 
     return Ptr;
 } catch (MemoryProviderError &e) {
@@ -877,7 +898,7 @@ void *DisjointPool::AllocImpl::allocate(size_t Size, size_t Alignment,
     FromPool = false;
     if (AlignedSize > getParams().MaxPoolableSize) {
         Ptr = memoryProviderAlloc(getMemHandle(), Size, Alignment);
-        utils_annotate_memory_undefined(Ptr, Size);
+        annotate_memory_undefined(Ptr, Size);
         return Ptr;
     }
 
@@ -894,8 +915,7 @@ void *DisjointPool::AllocImpl::allocate(size_t Size, size_t Alignment,
     }
 
     VALGRIND_DO_MEMPOOL_ALLOC(this, AlignPtrUp(Ptr, Alignment), Size);
-    utils_annotate_memory_undefined(AlignPtrUp(Ptr, Alignment), Size);
-
+    annotate_memory_undefined(AlignPtrUp(Ptr, Alignment), Size);
     return AlignPtrUp(Ptr, Alignment);
 } catch (MemoryProviderError &e) {
     umf::getPoolLastStatusRef<DisjointPool>() = e.code;
@@ -962,8 +982,7 @@ void DisjointPool::AllocImpl::deallocate(void *Ptr, bool &ToPool) {
    }

    VALGRIND_DO_MEMPOOL_FREE(this, Ptr);
-    utils_annotate_memory_inaccessible(Ptr, Bucket.getSize());
-
+    annotate_memory_inaccessible(Ptr, Bucket.getSize());
    if (Bucket.getSize() <= Bucket.ChunkCutOff()) {
        Bucket.freeChunk(Ptr, Slab, ToPool);
    } else {
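
Note on the pattern above: the new annotate_memory_* wrappers compile the utils_annotate_* calls away unless the build defines POISON_MEMORY to a non-zero value (for example via -DPOISON_MEMORY=1 on the compiler command line). Below is a minimal standalone sketch of the same compile-time gate; annotate_region() is a hypothetical stand-in for the UMF helpers, not part of the library.

// Minimal sketch (not UMF code) of the compile-time gate used above.
// annotate_region() is a hypothetical stand-in for the utils_annotate_* helpers;
// build with -DPOISON_MEMORY=1 to keep the annotation call, or without it to
// let the preprocessor drop the call entirely.
#include <cstddef>
#include <cstdio>

#ifndef POISON_MEMORY
#define POISON_MEMORY 0
#endif

// Hypothetical annotation routine standing in for a real sanitizer hook.
static void annotate_region(void *ptr, std::size_t size) {
    std::printf("annotating %zu bytes at %p\n", size, ptr);
}

static inline void maybe_annotate([[maybe_unused]] void *ptr,
                                  [[maybe_unused]] std::size_t size) {
#if POISON_MEMORY
    annotate_region(ptr, size); // compiled in only when POISON_MEMORY != 0
#endif
}

int main() {
    char buffer[64];
    maybe_annotate(buffer, sizeof(buffer)); // no-op in a default build
    return 0;
}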