@@ -178,7 +178,7 @@ void MsanThreadLocalMallocStorage::CommitBack() {
   allocator.DestroyCache(GetAllocatorCache(this));
 }
 
-static void *MsanAllocate(StackTrace *stack, uptr size, uptr alignment,
+static void *MsanAllocate(BufferedStackTrace *stack, uptr size, uptr alignment,
                           bool zeroise) {
   if (size > max_malloc_size) {
     if (AllocatorMayReturnNull()) {
@@ -229,7 +229,7 @@ static void *MsanAllocate(StackTrace *stack, uptr size, uptr alignment,
   return allocated;
 }
 
-void MsanDeallocate(StackTrace *stack, void *p) {
+void MsanDeallocate(BufferedStackTrace *stack, void *p) {
   CHECK(p);
   UnpoisonParam(1);
   RunFreeHooks(p);
@@ -259,8 +259,8 @@ void MsanDeallocate(StackTrace *stack, void *p) {
   }
 }
 
-static void *MsanReallocate(StackTrace *stack, void *old_p, uptr new_size,
-                            uptr alignment) {
+static void *MsanReallocate(BufferedStackTrace *stack, void *old_p,
+                            uptr new_size, uptr alignment) {
   Metadata *meta = reinterpret_cast<Metadata*>(allocator.GetMetaData(old_p));
   uptr old_size = meta->requested_size;
   uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(old_p);
@@ -284,7 +284,7 @@ static void *MsanReallocate(StackTrace *stack, void *old_p, uptr new_size,
   return new_p;
 }
 
-static void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
+static void *MsanCalloc(BufferedStackTrace *stack, uptr nmemb, uptr size) {
   if (UNLIKELY(CheckForCallocOverflow(size, nmemb))) {
     if (AllocatorMayReturnNull())
       return nullptr;
@@ -320,15 +320,15 @@ static uptr AllocationSizeFast(const void *p) {
   return reinterpret_cast<Metadata *>(allocator.GetMetaData(p))->requested_size;
 }
 
-void *msan_malloc(uptr size, StackTrace *stack) {
+void *msan_malloc(uptr size, BufferedStackTrace *stack) {
   return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
 }
 
-void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
+void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
   return SetErrnoOnNull(MsanCalloc(stack, nmemb, size));
 }
 
-void *msan_realloc(void *ptr, uptr size, StackTrace *stack) {
+void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack) {
   if (!ptr)
     return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
   if (size == 0) {
@@ -338,7 +338,8 @@ void *msan_realloc(void *ptr, uptr size, StackTrace *stack) {
   return SetErrnoOnNull(MsanReallocate(stack, ptr, size, sizeof(u64)));
 }
 
-void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
+void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
+                        BufferedStackTrace *stack) {
   if (UNLIKELY(CheckForCallocOverflow(size, nmemb))) {
     errno = errno_ENOMEM;
     if (AllocatorMayReturnNull())
@@ -348,11 +349,11 @@ void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
   return msan_realloc(ptr, nmemb * size, stack);
 }
 
-void *msan_valloc(uptr size, StackTrace *stack) {
+void *msan_valloc(uptr size, BufferedStackTrace *stack) {
   return SetErrnoOnNull(MsanAllocate(stack, size, GetPageSizeCached(), false));
 }
 
-void *msan_pvalloc(uptr size, StackTrace *stack) {
+void *msan_pvalloc(uptr size, BufferedStackTrace *stack) {
   uptr PageSize = GetPageSizeCached();
   if (UNLIKELY(CheckForPvallocOverflow(size, PageSize))) {
     errno = errno_ENOMEM;
@@ -365,7 +366,7 @@ void *msan_pvalloc(uptr size, StackTrace *stack) {
   return SetErrnoOnNull(MsanAllocate(stack, size, PageSize, false));
 }
 
-void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
+void *msan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) {
   if (UNLIKELY(!CheckAlignedAllocAlignmentAndSize(alignment, size))) {
     errno = errno_EINVAL;
     if (AllocatorMayReturnNull())
@@ -375,7 +376,7 @@ void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
   return SetErrnoOnNull(MsanAllocate(stack, size, alignment, false));
 }
 
-void *msan_memalign(uptr alignment, uptr size, StackTrace *stack) {
+void *msan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack) {
  if (UNLIKELY(!IsPowerOfTwo(alignment))) {
    errno = errno_EINVAL;
    if (AllocatorMayReturnNull())
@@ -386,7 +387,7 @@ void *msan_memalign(uptr alignment, uptr size, StackTrace *stack) {
 }
 
 int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
-                        StackTrace *stack) {
+                        BufferedStackTrace *stack) {
   if (UNLIKELY(!CheckPosixMemalignAlignment(alignment))) {
     if (AllocatorMayReturnNull())
       return errno_EINVAL;
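
Every allocator entry point in this diff narrows its parameter from the base `StackTrace` to the concrete `BufferedStackTrace`. As a minimal standalone sketch of what that type relationship buys, here is an illustrative base-view/owning-buffer pair; `StackTraceView`, `OwnedStackTrace`, `Capture`, and `ReportAllocation` are hypothetical stand-ins for this example, not compiler-rt's actual definitions:

```cpp
#include <cstdint>
#include <cstdio>

// Base type: a non-owning view of an already-captured trace
// (playing the role StackTrace plays in sanitizer_common).
struct StackTraceView {
  const uintptr_t *frames = nullptr;
  unsigned size = 0;
};

// Derived type that owns its storage and can fill it on demand
// (playing the role of BufferedStackTrace).
struct OwnedStackTrace : StackTraceView {
  static constexpr unsigned kMaxFrames = 64;
  uintptr_t buffer[kMaxFrames];

  // Hypothetical capture routine; a real unwinder would walk frame
  // pointers or consult the platform's unwind tables.
  void Capture(uintptr_t pc, uintptr_t bp) {
    buffer[0] = pc;
    buffer[1] = bp;
    frames = buffer;
    size = 2;
  }
};

// Because the parameter is the concrete owning type, the callee can
// capture (or re-capture) the trace itself; a StackTraceView parameter
// could only read whatever the caller had already filled in.
void ReportAllocation(OwnedStackTrace *stack) {
  if (stack->size == 0)
    stack->Capture(/*pc=*/0x1234, /*bp=*/0x5678);  // placeholder values
  std::printf("allocation traced through %u frame(s)\n", stack->size);
}

int main() {
  OwnedStackTrace stack;  // frame buffer lives on the caller's stack
  ReportAllocation(&stack);
}
```

Threading the owning type through the whole call chain also documents in the signatures themselves that callees may fill the buffer, and it avoids a downcast at the one point where an unwind actually happens.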