|
16 | 16 | #include "asan_report.h"
|
17 | 17 | #include "asan_stack.h"
|
18 | 18 | #include "sanitizer_common/sanitizer_atomic.h"
|
| 19 | +#include "sanitizer_common/sanitizer_common.h" |
19 | 20 | #include "sanitizer_common/sanitizer_flags.h"
|
20 | 21 | #include "sanitizer_common/sanitizer_interface_internal.h"
|
21 | 22 | #include "sanitizer_common/sanitizer_libc.h"
|
@@ -576,6 +577,185 @@ void __sanitizer_annotate_double_ended_contiguous_container(
|
576 | 577 | }
|
577 | 578 | }
|
578 | 579 |
|
| 580 | +// Marks the specified number of bytes in a granule as accessible or |
| 581 | +// poisones the whole granule with kAsanContiguousContainerOOBMagic value. |
| 582 | +static void SetContainerGranule(uptr ptr, u8 n) { |
| 583 | + constexpr uptr granularity = ASAN_SHADOW_GRANULARITY; |
| 584 | + u8 s = (n == granularity) ? 0 : (n ? n : kAsanContiguousContainerOOBMagic); |
| 585 | + *(u8 *)MemToShadow(ptr) = s; |
| 586 | +} |
| 587 | + |
// Performs a byte-by-byte copy of ASan annotations (shadow memory values).
// Result may be different due to ASan limitations, but result cannot lead
// to false positives (more memory than requested may get unpoisoned).
static void SlowCopyContainerAnnotations(uptr src_beg, uptr src_end,
                                         uptr dst_beg, uptr dst_end) {
  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
  uptr dst_end_down = RoundDownTo(dst_end, granularity);
  uptr src_ptr = src_beg;
  uptr dst_ptr = dst_beg;

  // Process one destination granule per iteration; src_ptr and dst_ptr
  // advance in lockstep.
  while (dst_ptr < dst_end) {
    uptr granule_beg = RoundDownTo(dst_ptr, granularity);
    uptr granule_end = granule_beg + granularity;
    uptr unpoisoned_bytes = 0;

    // Scan the source bytes that map into this destination granule and
    // remember the offset one past the last unpoisoned byte seen.
    uptr end = Min(granule_end, dst_end);
    for (; dst_ptr != end; ++dst_ptr, ++src_ptr)
      if (!AddressIsPoisoned(src_ptr))
        unpoisoned_bytes = dst_ptr - granule_beg + 1;

    // Last, partial destination granule: if the byte just past the container
    // is accessible, skip writing the shadow — poisoning the granule could
    // make memory after the container inaccessible.
    if (dst_ptr == dst_end && dst_end != dst_end_down &&
        !AddressIsPoisoned(dst_end))
      continue;

    // Otherwise annotate the granule. If the granule starts before dst_beg
    // (shared first granule) and all copied bytes are poisoned, keep the
    // bytes in front of the container accessible when they currently are.
    if (unpoisoned_bytes != 0 || granule_beg >= dst_beg)
      SetContainerGranule(granule_beg, unpoisoned_bytes);
    else if (!AddressIsPoisoned(dst_beg))
      SetContainerGranule(granule_beg, dst_beg - granule_beg);
  }
}
| 618 | + |
// Performs a byte-by-byte copy of ASan annotations (shadow memory values),
// going through bytes in reversed order, but not reversing annotations.
// Reversed order is required when the buffers overlap and the destination
// starts inside the source range (see the caller).
// Result may be different due to ASan limitations, but result cannot lead
// to false positives (more memory than requested may get unpoisoned).
static void SlowReversedCopyContainerAnnotations(uptr src_beg, uptr src_end,
                                                 uptr dst_beg, uptr dst_end) {
  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
  uptr dst_end_down = RoundDownTo(dst_end, granularity);
  uptr src_ptr = src_end;
  uptr dst_ptr = dst_end;

  // Process one destination granule per iteration, moving backwards;
  // src_ptr and dst_ptr retreat in lockstep (both point one past the next
  // byte to examine).
  while (dst_ptr > dst_beg) {
    uptr granule_beg = RoundDownTo(dst_ptr - 1, granularity);
    uptr unpoisoned_bytes = 0;

    // Walk this granule backwards; the first unpoisoned source byte found
    // (i.e. the highest-addressed one) fixes the accessible-byte count.
    uptr end = Max(granule_beg, dst_beg);
    for (; dst_ptr != end; --dst_ptr, --src_ptr)
      if (unpoisoned_bytes == 0 && !AddressIsPoisoned(src_ptr - 1))
        unpoisoned_bytes = dst_ptr - granule_beg;

    // Last, partial destination granule: if the byte just past the container
    // is accessible, skip writing the shadow — poisoning the granule could
    // make memory after the container inaccessible.
    if (dst_ptr >= dst_end_down && !AddressIsPoisoned(dst_end))
      continue;

    // Otherwise annotate the granule. If the granule starts before dst_beg
    // (shared first granule) and all copied bytes are poisoned, keep the
    // bytes in front of the container accessible when they currently are.
    if (granule_beg == dst_ptr || unpoisoned_bytes != 0)
      SetContainerGranule(granule_beg, unpoisoned_bytes);
    else if (!AddressIsPoisoned(dst_beg))
      SetContainerGranule(granule_beg, dst_beg - granule_beg);
  }
}
| 648 | + |
| 649 | +// A helper function for __sanitizer_copy_contiguous_container_annotations, |
| 650 | +// has assumption about begin and end of the container. |
| 651 | +// Should not be used stand alone. |
| 652 | +static void CopyContainerFirstGranuleAnnotation(uptr src_beg, uptr dst_beg) { |
| 653 | + constexpr uptr granularity = ASAN_SHADOW_GRANULARITY; |
| 654 | + // First granule |
| 655 | + uptr src_beg_down = RoundDownTo(src_beg, granularity); |
| 656 | + uptr dst_beg_down = RoundDownTo(dst_beg, granularity); |
| 657 | + if (dst_beg_down == dst_beg) |
| 658 | + return; |
| 659 | + if (!AddressIsPoisoned(src_beg)) |
| 660 | + *(u8 *)MemToShadow(dst_beg_down) = *(u8 *)MemToShadow(src_beg_down); |
| 661 | + else if (!AddressIsPoisoned(dst_beg)) |
| 662 | + SetContainerGranule(dst_beg_down, dst_beg - dst_beg_down); |
| 663 | +} |
| 664 | + |
| 665 | +// A helper function for __sanitizer_copy_contiguous_container_annotations, |
| 666 | +// has assumption about begin and end of the container. |
| 667 | +// Should not be used stand alone. |
| 668 | +static void CopyContainerLastGranuleAnnotation(uptr src_end, uptr dst_end) { |
| 669 | + constexpr uptr granularity = ASAN_SHADOW_GRANULARITY; |
| 670 | + // Last granule |
| 671 | + uptr src_end_down = RoundDownTo(src_end, granularity); |
| 672 | + uptr dst_end_down = RoundDownTo(dst_end, granularity); |
| 673 | + if (dst_end_down == dst_end || !AddressIsPoisoned(dst_end)) |
| 674 | + return; |
| 675 | + if (AddressIsPoisoned(src_end)) |
| 676 | + *(u8 *)MemToShadow(dst_end_down) = *(u8 *)MemToShadow(src_end_down); |
| 677 | + else |
| 678 | + SetContainerGranule(dst_end_down, src_end - src_end_down); |
| 679 | +} |
| 680 | + |
// This function copies ASan memory annotations (poisoned/unpoisoned states)
// from one buffer to another.
// Its main purpose is to help with relocating trivially relocatable objects,
// whose memory may be poisoned, without calling the copy constructor.
// However, it does not move memory content itself, only annotations.
// If the buffers aren't aligned (the distance between buffers isn't
// granule-aligned)
//     src_beg % granularity != dst_beg % granularity
// the function handles this by going byte by byte, slowing down performance.
// The old buffer annotations are not removed. If necessary,
// user can unpoison old buffer with __asan_unpoison_memory_region.
void __sanitizer_copy_contiguous_container_annotations(const void *src_beg_p,
                                                       const void *src_end_p,
                                                       const void *dst_beg_p,
                                                       const void *dst_end_p) {
  if (!flags()->detect_container_overflow)
    return;

  VPrintf(3, "contiguous_container_src: %p %p\n", src_beg_p, src_end_p);
  VPrintf(3, "contiguous_container_dst: %p %p\n", dst_beg_p, dst_end_p);

  uptr src_beg = reinterpret_cast<uptr>(src_beg_p);
  uptr src_end = reinterpret_cast<uptr>(src_end_p);
  uptr dst_beg = reinterpret_cast<uptr>(dst_beg_p);
  uptr dst_end = reinterpret_cast<uptr>(dst_end_p);

  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;

  // The source range must be well-formed and both ranges equally long;
  // otherwise report a fatal usage error.
  if (src_beg > src_end || (dst_end - dst_beg) != (src_end - src_beg)) {
    GET_STACK_TRACE_FATAL_HERE;
    ReportBadParamsToCopyContiguousContainerAnnotations(
        src_beg, src_end, dst_beg, dst_end, &stack);
  }

  // Empty range, or source and destination coincide: nothing to do.
  if (src_beg == src_end || src_beg == dst_beg)
    return;
  // Due to support for overlapping buffers, we may have to copy elements
  // in reversed order, when destination buffer starts in the middle of
  // the source buffer (or shares first granule with it).
  //
  // When buffers are not granule-aligned (or distance between them,
  // to be specific), annotations have to be copied byte by byte.
  //
  // The only remaining edge cases involve edge granules,
  // when the container starts or ends within a granule.
  uptr src_beg_up = RoundUpTo(src_beg, granularity);
  uptr src_end_up = RoundUpTo(src_end, granularity);
  bool copy_in_reversed_order = src_beg < dst_beg && dst_beg <= src_end_up;
  // Slow path: misaligned distance between buffers, or the destination is
  // too short to contain even one interior (full) granule.
  if (src_beg % granularity != dst_beg % granularity ||
      RoundDownTo(dst_end - 1, granularity) <= dst_beg) {
    if (copy_in_reversed_order)
      SlowReversedCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
    else
      SlowCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
    return;
  }

  // As buffers are granule-aligned, we can just copy annotations of granules
  // from the middle.
  uptr dst_beg_up = RoundUpTo(dst_beg, granularity);
  uptr dst_end_down = RoundDownTo(dst_end, granularity);
  // Patch the edge granule that would otherwise be clobbered first: when
  // copying in reversed order, fix the last granule before the bulk copy
  // (and the first one after it); otherwise the opposite.
  if (copy_in_reversed_order)
    CopyContainerLastGranuleAnnotation(src_end, dst_end);
  else
    CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);

  // Bulk-copy whole-granule annotations directly in shadow memory;
  // memmove handles the potential overlap.
  if (dst_beg_up < dst_end_down) {
    internal_memmove((u8 *)MemToShadow(dst_beg_up),
                     (u8 *)MemToShadow(src_beg_up),
                     (dst_end_down - dst_beg_up) / granularity);
  }

  if (copy_in_reversed_order)
    CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
  else
    CopyContainerLastGranuleAnnotation(src_end, dst_end);
}
| 758 | + |
579 | 759 | static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {
|
580 | 760 | CHECK_LE(begin, end);
|
581 | 761 | constexpr uptr kMaxRangeToCheck = 32;
|
|
0 commit comments