
Commit f389f07

Merge branch 'main' of github.com:apple/swift into tensorflow-stage
* 'main' of github.com:apple/swift: [Runtime] Validate scribble data when metadata allocation scribbling is enabled.
2 parents: 8655ce5 + 2b88336

1 file changed: +62 -33 lines

stdlib/public/runtime/Metadata.cpp

Lines changed: 62 additions & 33 deletions
@@ -28,6 +28,7 @@
 #include "swift/Runtime/Mutex.h"
 #include "swift/Runtime/Once.h"
 #include "swift/Strings.h"
+#include "llvm/ADT/StringExtras.h"
 #include <algorithm>
 #include <cctype>
 #include <cinttypes>
@@ -5805,30 +5806,6 @@ bool swift::_swift_debug_metadataAllocationIterationEnabled = false;
 const void * const swift::_swift_debug_allocationPoolPointer = &AllocationPool;
 std::atomic<const void *> swift::_swift_debug_metadataAllocationBacktraceList;
 
-static void checkAllocatorDebugEnvironmentVariable(void *context) {
-  _swift_debug_metadataAllocationIterationEnabled
-    = runtime::environment::SWIFT_DEBUG_ENABLE_METADATA_ALLOCATION_ITERATION();
-  if (!_swift_debug_metadataAllocationIterationEnabled) {
-    if (runtime::environment::SWIFT_DEBUG_ENABLE_METADATA_BACKTRACE_LOGGING())
-      swift::warning(RuntimeErrorFlagNone,
-                     "Warning: SWIFT_DEBUG_ENABLE_METADATA_BACKTRACE_LOGGING "
-                     "without SWIFT_DEBUG_ENABLE_METADATA_ALLOCATION_ITERATION "
-                     "has no effect.\n");
-    return;
-  }
-
-  // Write a PoolTrailer to the end of InitialAllocationPool and shrink
-  // the pool accordingly.
-  auto poolCopy = AllocationPool.load(std::memory_order_relaxed);
-  assert(poolCopy.Begin == InitialAllocationPool.Pool);
-  size_t newPoolSize = InitialPoolSize - sizeof(PoolTrailer);
-  PoolTrailer trailer = { nullptr, newPoolSize };
-  memcpy(InitialAllocationPool.Pool + newPoolSize, &trailer,
-         sizeof(trailer));
-  poolCopy.Remaining = newPoolSize;
-  AllocationPool.store(poolCopy, std::memory_order_relaxed);
-}
-
 static void recordBacktrace(void *allocation) {
   withCurrentBacktrace([&](void **addrs, int count) {
     MetadataAllocationBacktraceHeader<InProcess> *record =
@@ -5847,28 +5824,74 @@ static void recordBacktrace(void *allocation) {
   });
 }
 
-template <typename Pointee>
-static inline void memsetScribble(Pointee *bytes, size_t totalSize) {
+static inline bool scribbleEnabled() {
 #ifndef NDEBUG
   // When DEBUG is defined, always scribble.
-  memset(bytes, 0xAA, totalSize);
+  return true;
 #else
   // When DEBUG is not defined, only scribble when the
   // SWIFT_DEBUG_ENABLE_MALLOC_SCRIBBLE environment variable is set.
-  if (SWIFT_UNLIKELY(
-          runtime::environment::SWIFT_DEBUG_ENABLE_MALLOC_SCRIBBLE())) {
-    memset(bytes, 0xAA, totalSize);
-  }
+  return SWIFT_UNLIKELY(
+      runtime::environment::SWIFT_DEBUG_ENABLE_MALLOC_SCRIBBLE());
 #endif
 }
 
+static constexpr char scribbleByte = 0xAA;
+
+template <typename Pointee>
+static inline void memsetScribble(Pointee *bytes, size_t totalSize) {
+  if (scribbleEnabled())
+    memset(bytes, scribbleByte, totalSize);
+}
+
+/// When scribbling is enabled, check the specified region for the scribble
+/// values to detect overflows. When scribbling is disabled, this is a no-op.
+static inline void checkScribble(char *bytes, size_t totalSize) {
+  if (scribbleEnabled())
+    for (size_t i = 0; i < totalSize; i++)
+      if (bytes[i] != scribbleByte) {
+        const size_t maxToPrint = 16;
+        size_t remaining = totalSize - i;
+        size_t toPrint = std::min(remaining, maxToPrint);
+        std::string hex = toHex(llvm::StringRef{&bytes[i], toPrint});
+        swift::fatalError(
+            0, "corrupt metadata allocation arena detected at %p: %s%s",
+            &bytes[i], hex.c_str(), toPrint < remaining ? "..." : "");
+      }
+}
+
+static void checkAllocatorDebugEnvironmentVariables(void *context) {
+  memsetScribble(InitialAllocationPool.Pool, InitialPoolSize);
+
+  _swift_debug_metadataAllocationIterationEnabled =
+      runtime::environment::SWIFT_DEBUG_ENABLE_METADATA_ALLOCATION_ITERATION();
+  if (!_swift_debug_metadataAllocationIterationEnabled) {
+    if (runtime::environment::SWIFT_DEBUG_ENABLE_METADATA_BACKTRACE_LOGGING())
+      swift::warning(RuntimeErrorFlagNone,
+                     "Warning: SWIFT_DEBUG_ENABLE_METADATA_BACKTRACE_LOGGING "
+                     "without SWIFT_DEBUG_ENABLE_METADATA_ALLOCATION_ITERATION "
+                     "has no effect.\n");
+    return;
+  }
+
+  // Write a PoolTrailer to the end of InitialAllocationPool and shrink
+  // the pool accordingly.
+  auto poolCopy = AllocationPool.load(std::memory_order_relaxed);
+  assert(poolCopy.Begin == InitialAllocationPool.Pool);
+  size_t newPoolSize = InitialPoolSize - sizeof(PoolTrailer);
+  PoolTrailer trailer = {nullptr, newPoolSize};
+  memcpy(InitialAllocationPool.Pool + newPoolSize, &trailer, sizeof(trailer));
+  poolCopy.Remaining = newPoolSize;
+  AllocationPool.store(poolCopy, std::memory_order_relaxed);
+}
+
 void *MetadataAllocator::Allocate(size_t size, size_t alignment) {
   assert(Tag != 0);
   assert(alignment <= alignof(void*));
   assert(size % alignof(void*) == 0);
 
   static OnceToken_t getenvToken;
-  SWIFT_ONCE_F(getenvToken, checkAllocatorDebugEnvironmentVariable, nullptr);
+  SWIFT_ONCE_F(getenvToken, checkAllocatorDebugEnvironmentVariables, nullptr);
 
   // If the size is larger than the maximum, just use malloc.
   if (size > PoolRange::MaxPoolAllocationSize) {
@@ -5899,6 +5922,7 @@ void *MetadataAllocator::Allocate(size_t size, size_t alignment) {
        poolSize -= sizeof(PoolTrailer);
      allocatedNewPage = true;
      allocation = new char[PoolRange::PageSize];
+      memsetScribble(allocation, PoolRange::PageSize);
 
      if (SWIFT_UNLIKELY(_swift_debug_metadataAllocationIterationEnabled)) {
        PoolTrailer *newTrailer = (PoolTrailer *)(allocation + poolSize);
@@ -5919,7 +5943,6 @@ void *MetadataAllocator::Allocate(size_t size, size_t alignment) {
    // If that succeeded, we've successfully allocated.
    __msan_allocated_memory(allocation, sizeWithHeader);
    __asan_unpoison_memory_region(allocation, sizeWithHeader);
-    memsetScribble(allocation, sizeWithHeader);
 
    if (SWIFT_UNLIKELY(_swift_debug_metadataAllocationIterationEnabled)) {
      AllocationHeader *header = (AllocationHeader *)allocation;
@@ -5932,8 +5955,10 @@ void *MetadataAllocator::Allocate(size_t size, size_t alignment) {
              SWIFT_DEBUG_ENABLE_METADATA_BACKTRACE_LOGGING())
        recordBacktrace(returnedAllocation);
 
+      checkScribble(returnedAllocation, size);
      return returnedAllocation;
    } else {
+      checkScribble(allocation, size);
      return allocation;
    }
  }
@@ -5961,6 +5986,10 @@ void MetadataAllocator::Deallocate(const void *allocation, size_t size,
    return;
  }
 
+  // If we're scribbling, re-scribble the allocation so that the next call to
+  // Allocate sees what it expects.
+  memsetScribble(const_cast<void *>(allocation), size);
+
  // Try to swap back to the pre-allocation state. If this fails,
  // don't bother trying again; we'll just leak the allocation.
  PoolRange newState = { reinterpret_cast<char*>(const_cast<void*>(allocation)),
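
For context, the change above keeps unallocated arena memory filled with a known scribble byte (0xAA): Allocate() now verifies that the bytes it is about to hand out still hold that pattern, and Deallocate() re-scribbles returned memory so the next allocation's check still passes. The following is a minimal standalone sketch of that scribble-and-verify idea, not the runtime's actual code; the pool, kScribbleByte, allocate, and deallocate names are illustrative only.

// Minimal sketch of the scribble-and-verify pattern (illustrative only,
// not the Swift runtime's implementation).
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <cstring>

namespace {

constexpr unsigned char kScribbleByte = 0xAA; // same fill value the commit uses
constexpr size_t kPoolSize = 1024;

unsigned char pool[kPoolSize]; // a tiny bump-allocated arena
size_t poolOffset = 0;

// Fill a region with the scribble byte.
void scribble(void *bytes, size_t size) {
  std::memset(bytes, kScribbleByte, size);
}

// Abort if any byte of supposedly-unused arena memory no longer holds the
// scribble value: something wrote into memory it did not own.
void checkScribble(const unsigned char *bytes, size_t size) {
  for (size_t i = 0; i < size; i++)
    if (bytes[i] != kScribbleByte) {
      std::fprintf(stderr, "corrupt arena detected at %p (byte 0x%02x)\n",
                   static_cast<const void *>(&bytes[i]),
                   static_cast<unsigned>(bytes[i]));
      std::abort();
    }
}

void *allocate(size_t size) {
  assert(poolOffset + size <= kPoolSize && "pool exhausted");
  unsigned char *result = pool + poolOffset;
  checkScribble(result, size); // the free space must still be scribbled
  poolOffset += size;
  return result;
}

void deallocate(void *ptr, size_t size) {
  // Only the most recent allocation can be given back to a bump allocator.
  // Re-scribble it so a later allocate() of the same bytes sees what it
  // expects (this mirrors the commit's change to Deallocate).
  if (static_cast<unsigned char *>(ptr) + size == pool + poolOffset) {
    scribble(ptr, size);
    poolOffset -= size;
  }
}

} // namespace

int main() {
  scribble(pool, kPoolSize); // the whole arena starts out scribbled
  void *a = allocate(64);
  deallocate(a, 64);
  void *b = allocate(64); // passes: deallocate() restored the scribble
  (void)b;
  std::puts("scribble checks passed");
  return 0;
}

The reason for re-scribbling on deallocation, as the commit's own comment notes, is that a later allocation of the same region would otherwise see the stale contents and report them as arena corruption.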
