Skip to content

Commit 0ab9216

Browse files
committed
Not overlapping containers
Implemented correct container annotations copying function (non-overlapping containers only). Refactored the initial PoC into a fully functional implementation for copying container annotations without moving/copying the memory content itself. This function currently supports cases where the source and destination containers do not overlap; handling overlapping containers would add complexity. The function is designed to work irrespective of whether the buffers are granule-aligned or the distance between them is granule-aligned. However, such scenarios may have an impact on performance. A test case has been included to verify the correctness of the implementation. Removed unpoisoning the original buffer at the end; users can do it themselves if they want to.
1 parent 2db13f5 commit 0ab9216

File tree

2 files changed

+135
-86
lines changed

2 files changed

+135
-86
lines changed

compiler-rt/lib/asan/asan_poisoning.cpp

Lines changed: 109 additions & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -576,8 +576,46 @@ void __sanitizer_annotate_double_ended_contiguous_container(
576576
}
577577
}
578578

579-
// This function moves annotation from one buffer to another.
580-
// Old buffer is unpoisoned at the end.
579+
static bool WithinOneGranule(uptr p, uptr q) {
580+
if (p == q)
581+
return true;
582+
return RoundDownTo(p, ASAN_SHADOW_GRANULARITY) ==
583+
RoundDownTo(q - 1, ASAN_SHADOW_GRANULARITY);
584+
}
585+
586+
static void PoisonContainer(uptr storage_beg, uptr storage_end) {
587+
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
588+
uptr internal_beg = RoundUpTo(storage_beg, granularity);
589+
uptr external_beg = RoundDownTo(storage_beg, granularity);
590+
uptr internal_end = RoundDownTo(storage_end, granularity);
591+
592+
if (internal_end > internal_beg)
593+
PoisonShadow(internal_beg, internal_end - internal_beg,
594+
kAsanContiguousContainerOOBMagic);
595+
// The new buffer may start in the middle of a granule.
596+
if (internal_beg != storage_beg && internal_beg < internal_end &&
597+
!AddressIsPoisoned(storage_beg)) {
598+
*(u8 *)MemToShadow(external_beg) =
599+
static_cast<u8>(storage_beg - external_beg);
600+
}
601+
// The new buffer may end in the middle of a granule.
602+
if (internal_end != storage_end && AddressIsPoisoned(storage_end)) {
603+
*(u8 *)MemToShadow(internal_end) =
604+
static_cast<u8>(kAsanContiguousContainerOOBMagic);
605+
}
606+
}
607+
608+
// This function copies ASan memory annotations (poisoned/unpoisoned states)
609+
// from one buffer to another.
610+
// Its main purpose is to help with relocating trivially relocatable objects,
611+
// whose memory may be poisoned, without calling the copy constructor.
612+
// However, it does not move memory content itself, only annotations.
613+
// If the buffers aren't aligned (the distance between buffers isn't
614+
// granule-aligned)
615+
// // old_storage_beg % granularity != new_storage_beg % granularity
616+
// the function handles this by going byte by byte, slowing down performance.
617+
// The old buffer annotations are not removed. If necessary,
618+
// user can unpoison old buffer with __asan_unpoison_memory_region.
581619
void __sanitizer_move_contiguous_container_annotations(
582620
const void *old_storage_beg_p, const void *old_storage_end_p,
583621
const void *new_storage_beg_p, const void *new_storage_end_p) {
@@ -606,6 +644,9 @@ void __sanitizer_move_contiguous_container_annotations(
606644
&stack);
607645
}
608646

647+
if (old_storage_beg == old_storage_end)
648+
return;
649+
609650
uptr new_internal_beg = RoundUpTo(new_storage_beg, granularity);
610651
uptr old_internal_beg = RoundUpTo(old_storage_beg, granularity);
611652
uptr new_external_beg = RoundDownTo(new_storage_beg, granularity);
@@ -615,93 +656,94 @@ void __sanitizer_move_contiguous_container_annotations(
615656

616657
// At the very beginning we poison the whole buffer.
617658
// Later we unpoison what is necessary.
618-
PoisonShadow(new_internal_beg, new_internal_end - new_internal_beg,
619-
kAsanContiguousContainerOOBMagic);
620-
if (new_internal_beg != new_storage_beg) {
621-
uptr new_unpoisoned = *(u8 *)MemToShadow(new_external_beg);
622-
if (new_unpoisoned > (new_storage_beg - new_external_beg)) {
623-
*(u8 *)MemToShadow(new_external_beg) =
624-
static_cast<u8>(new_storage_beg - new_external_beg);
625-
}
626-
}
627-
if (new_internal_end != new_storage_end) {
628-
uptr new_unpoisoned = *(u8 *)MemToShadow(new_internal_end);
629-
if (new_unpoisoned <= (new_storage_end - new_internal_end)) {
630-
*(u8 *)MemToShadow(new_external_beg) =
631-
static_cast<u8>(kAsanContiguousContainerOOBMagic);
632-
}
633-
}
659+
PoisonContainer(new_storage_beg, new_storage_end);
634660

635661
// There are two cases.
636662
// 1) Distance between buffers is granule-aligned.
637-
// 2) It's not aligned, that case is slower.
663+
// 2) It's not aligned, and therefore requires going byte by byte.
638664
if (old_storage_beg % granularity == new_storage_beg % granularity) {
639665
// When buffers are aligned in the same way, we can just copy shadow memory,
640-
// except first and last granule.
641-
__builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
642-
(u8 *)MemToShadow(old_internal_beg),
643-
(new_internal_end - new_internal_beg) / granularity);
644-
// In first granule we cannot poison anything before beginning of the
645-
// container.
646-
if (new_internal_beg != new_storage_beg) {
647-
uptr old_unpoisoned = *(u8 *)MemToShadow(old_external_beg);
648-
uptr new_unpoisoned = *(u8 *)MemToShadow(new_external_beg);
649-
650-
if (old_unpoisoned > old_storage_beg - old_external_beg) {
651-
*(u8 *)MemToShadow(new_external_beg) = old_unpoisoned;
652-
} else if (new_unpoisoned > new_storage_beg - new_external_beg) {
653-
*(u8 *)MemToShadow(new_external_beg) =
654-
new_storage_beg - new_external_beg;
655-
}
656-
}
657-
// In last granule we cannot poison anything after the end of the container.
658-
if (new_internal_end != new_storage_end) {
659-
uptr old_unpoisoned = *(u8 *)MemToShadow(old_internal_end);
660-
uptr new_unpoisoned = *(u8 *)MemToShadow(new_internal_end);
661-
if (new_unpoisoned <= new_storage_end - new_internal_end &&
662-
old_unpoisoned < new_unpoisoned) {
663-
*(u8 *)MemToShadow(new_internal_end) = old_unpoisoned;
666+
// except the first and the last granule.
667+
if (new_internal_end > new_internal_beg)
668+
__builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
669+
(u8 *)MemToShadow(old_internal_beg),
670+
(new_internal_end - new_internal_beg) / granularity);
671+
// If the beginning and the end of the storage are aligned, we are done.
672+
// Otherwise, we have to handle remaining granules.
673+
if (new_internal_beg != new_storage_beg ||
674+
new_internal_end != new_storage_end) {
675+
if (WithinOneGranule(new_storage_beg, new_storage_end)) {
676+
if (new_internal_end == new_storage_end) {
677+
if (!AddressIsPoisoned(old_storage_beg)) {
678+
*(u8 *)MemToShadow(new_external_beg) =
679+
*(u8 *)MemToShadow(old_external_beg);
680+
} else if (!AddressIsPoisoned(new_storage_beg)) {
681+
*(u8 *)MemToShadow(new_external_beg) =
682+
new_storage_beg - new_external_beg;
683+
}
684+
} else if (AddressIsPoisoned(new_storage_end)) {
685+
if (!AddressIsPoisoned(old_storage_beg)) {
686+
*(u8 *)MemToShadow(new_external_beg) =
687+
AddressIsPoisoned(old_storage_end)
688+
? *(u8 *)MemToShadow(old_internal_end)
689+
: new_storage_end - new_external_beg;
690+
} else if (!AddressIsPoisoned(new_storage_beg)) {
691+
*(u8 *)MemToShadow(new_external_beg) =
692+
(new_storage_beg == new_external_beg)
693+
? static_cast<u8>(kAsanContiguousContainerOOBMagic)
694+
: new_storage_beg - new_external_beg;
695+
}
696+
}
697+
} else {
698+
// Buffer is not within one granule!
699+
if (new_internal_beg != new_storage_beg) {
700+
if (!AddressIsPoisoned(old_storage_beg)) {
701+
*(u8 *)MemToShadow(new_external_beg) =
702+
*(u8 *)MemToShadow(old_external_beg);
703+
} else if (!AddressIsPoisoned(new_storage_beg)) {
704+
*(u8 *)MemToShadow(new_external_beg) =
705+
new_storage_beg - new_external_beg;
706+
}
707+
}
708+
if (new_internal_end != new_storage_end &&
709+
AddressIsPoisoned(new_storage_end)) {
710+
*(u8 *)MemToShadow(new_internal_end) =
711+
AddressIsPoisoned(old_storage_end)
712+
? *(u8 *)MemToShadow(old_internal_end)
713+
: old_storage_end - old_internal_end;
714+
}
664715
}
665716
}
666717
} else {
667718
// If buffers are not aligned, we have to go byte by byte.
668719
uptr old_ptr = old_storage_beg;
669720
uptr new_ptr = new_storage_beg;
670721
uptr next_new;
671-
for (; new_ptr + granularity <= new_storage_end;) {
722+
for (; new_ptr < new_storage_end;) {
672723
next_new = RoundUpTo(new_ptr + 1, granularity);
673724
uptr unpoison_to = 0;
674-
for (; new_ptr != next_new; ++new_ptr, ++old_ptr) {
725+
for (; new_ptr != next_new && new_ptr != new_storage_end;
726+
++new_ptr, ++old_ptr) {
675727
if (!AddressIsPoisoned(old_ptr)) {
676728
unpoison_to = new_ptr + 1;
677729
}
678730
}
679-
if (unpoison_to != 0) {
680-
uptr granule_beg = new_ptr - granularity;
681-
uptr value = unpoison_to - granule_beg;
682-
*(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
683-
}
684-
}
685-
// Only case left is the end of the container in the middle of a granule.
686-
// If memory after the end is unpoisoned, we cannot change anything.
687-
// But if it's poisoned, we should unpoison as little as possible.
688-
if (new_ptr != new_storage_end && AddressIsPoisoned(new_storage_end)) {
689-
uptr unpoison_to = 0;
690-
for (; new_ptr != new_storage_end; ++new_ptr, ++old_ptr) {
691-
if (!AddressIsPoisoned(old_ptr)) {
692-
unpoison_to = new_ptr + 1;
731+
if (new_ptr < new_storage_end || new_ptr == new_internal_end ||
732+
AddressIsPoisoned(new_storage_end)) {
733+
uptr granule_beg = RoundDownTo(new_ptr - 1, granularity);
734+
if (unpoison_to != 0) {
735+
uptr value =
736+
(unpoison_to == next_new) ? 0 : unpoison_to - granule_beg;
737+
*(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
738+
} else {
739+
*(u8 *)MemToShadow(granule_beg) =
740+
(granule_beg >= new_storage_beg)
741+
? static_cast<u8>(kAsanContiguousContainerOOBMagic)
742+
: new_storage_beg - granule_beg;
693743
}
694744
}
695-
if (unpoison_to != 0) {
696-
uptr granule_beg = RoundDownTo(new_storage_end, granularity);
697-
uptr value = unpoison_to - granule_beg;
698-
*(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
699-
}
700745
}
701746
}
702-
703-
__asan_unpoison_memory_region((void *)old_storage_beg,
704-
old_storage_end - old_storage_beg);
705747
}
706748

707749
static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {

compiler-rt/test/asan/TestCases/move_container_annotations.cpp

Lines changed: 26 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -27,14 +27,15 @@ template <class T> static constexpr T RoundUp(T x) {
2727

2828
static std::deque<int> GetPoisonedState(char *begin, char *end) {
2929
std::deque<int> result;
30-
for (; begin != end; ++begin) {
31-
result.push_back(__asan_address_is_poisoned(begin));
30+
for (char *ptr = begin; ptr != end; ++ptr) {
31+
result.push_back(__asan_address_is_poisoned(ptr));
3232
}
3333
return result;
3434
}
3535

3636
static void RandomPoison(char *beg, char *end) {
37-
if (beg != RoundDown(beg) && (rand() % 2 == 1)) {
37+
if (beg != RoundDown(beg) && RoundDown(beg) != RoundDown(end) &&
38+
rand() % 2 == 1) {
3839
__asan_poison_memory_region(beg, RoundUp(beg) - beg);
3940
__asan_unpoison_memory_region(beg, rand() % (RoundUp(beg) - beg + 1));
4041
}
@@ -70,31 +71,36 @@ void TestMove(size_t capacity, size_t off_old, size_t off_new,
7071
char *new_buffer_end = new_buffer + new_buffer_size;
7172
bool poison_old = poison_buffers % 2 == 1;
7273
bool poison_new = poison_buffers / 2 == 1;
73-
if (poison_old)
74-
__asan_poison_memory_region(old_buffer, old_buffer_size);
75-
if (poison_new)
76-
__asan_poison_memory_region(new_buffer, new_buffer_size);
7774
char *old_beg = old_buffer + off_old;
7875
char *new_beg = new_buffer + off_new;
7976
char *old_end = old_beg + capacity;
8077
char *new_end = new_beg + capacity;
8178

82-
for (int i = 0; i < 1000; i++) {
79+
for (int i = 0; i < 75; i++) {
80+
if (poison_old)
81+
__asan_poison_memory_region(old_buffer, old_buffer_size);
82+
if (poison_new)
83+
__asan_poison_memory_region(new_buffer, new_buffer_size);
84+
8385
RandomPoison(old_beg, old_end);
84-
std::deque<int> poison_states(old_beg, old_end);
86+
std::deque<int> poison_states = GetPoisonedState(old_beg, old_end);
8587
__sanitizer_move_contiguous_container_annotations(old_beg, old_end, new_beg,
8688
new_end);
8789

8890
// If old_buffer were poisoned, expected state of memory before old_beg
8991
// is undetermined.
9092
// If old buffer were not poisoned, that memory should still be unpoisoned.
91-
// Area between old_beg and old_end should never be poisoned.
92-
char *cur = poison_old ? old_beg : old_buffer;
93-
for (; cur < old_end; ++cur) {
94-
assert(!__asan_address_is_poisoned(cur));
93+
char *cur;
94+
if (!poison_old) {
95+
for (cur = old_buffer; cur < old_beg; ++cur) {
96+
assert(!__asan_address_is_poisoned(cur));
97+
}
98+
}
99+
for (size_t i = 0; i < poison_states.size(); ++i) {
100+
assert(__asan_address_is_poisoned(&old_beg[i]) == poison_states[i]);
95101
}
96-
// Memory after old_beg should be the same as at the beginning.
97-
for (; cur < old_buffer_end; ++cur) {
102+
// Memory after old_end should be the same as at the beginning.
103+
for (cur = old_end; cur < old_buffer_end; ++cur) {
98104
assert(__asan_address_is_poisoned(cur) == poison_old);
99105
}
100106

@@ -118,7 +124,8 @@ void TestMove(size_t capacity, size_t off_old, size_t off_new,
118124
}
119125
}
120126
// [cur; new_end) is not checked yet.
121-
// If new_buffer were not poisoned, it cannot be poisoned and we can ignore check.
127+
// If new_buffer were not poisoned, it cannot be poisoned and we can ignore
128+
// a separate check.
122129
// If new_buffer were poisoned, it should be same as earlier.
123130
if (cur < new_end && poison_new) {
124131
size_t unpoisoned = count_unpoisoned(poison_states, new_end - cur);
@@ -143,9 +150,9 @@ void TestMove(size_t capacity, size_t off_old, size_t off_new,
143150

144151
int main(int argc, char **argv) {
145152
int n = argc == 1 ? 64 : atoi(argv[1]);
146-
for (int i = 0; i <= n; i++) {
147-
for (int j = 0; j < kGranularity * 2; j++) {
148-
for (int k = 0; k < kGranularity * 2; k++) {
153+
for (size_t j = 0; j < kGranularity * 2; j++) {
154+
for (size_t k = 0; k < kGranularity * 2; k++) {
155+
for (int i = 0; i <= n; i++) {
149156
for (int poison = 0; poison < 4; ++poison) {
150157
TestMove(i, j, k, poison);
151158
}

0 commit comments

Comments
 (0)