diff --git a/compiler-rt/include/sanitizer/common_interface_defs.h b/compiler-rt/include/sanitizer/common_interface_defs.h
index f9fce595b37b..57313f9bc80e 100644
--- a/compiler-rt/include/sanitizer/common_interface_defs.h
+++ b/compiler-rt/include/sanitizer/common_interface_defs.h
@@ -193,6 +193,43 @@ void SANITIZER_CDECL __sanitizer_annotate_double_ended_contiguous_container(
const void *old_container_beg, const void *old_container_end,
const void *new_container_beg, const void *new_container_end);
+/// Copies memory annotations from a source storage region to a destination
+/// storage region. After the operation, the destination region has the same
+/// memory annotations as the source region, as long as sanitizer limitations
+/// allow it (more bytes may be unpoisoned than in the source region, resulting
+/// in more false negatives, but never false positives). If the source and
+/// destination regions overlap, only the minimal required changes are made to
+/// preserve the correct annotations. Old storage bytes that are not in the new
+/// storage should have the same annotations, as long as sanitizer limitations
+/// allow it.
+///
+/// This function is primarily designed to be used when moving trivially
+/// relocatable objects that may have poisoned memory, making direct copying
+/// problematic under sanitizer. However, this function does not move memory
+/// content itself, only annotations.
+///
+/// A contiguous container is a container that keeps all of its elements in a
+/// contiguous region of memory. The container owns the region of memory
+/// [src_begin, src_end) and [dst_begin, dst_end). The memory
+/// within these regions may be alternately poisoned and non-poisoned, with
+/// possibly smaller poisoned and unpoisoned regions.
+///
+/// If this function fully poisons a granule, it is marked as "container
+/// overflow".
+///
+/// Argument requirements: The destination container must have the same size as
+/// the source container, which is inferred from the beginning and end of the
+/// source region. Addresses may be granule-unaligned, but this may affect
+/// performance.
+///
+/// \param src_begin Begin of the source container region.
+/// \param src_end End of the source container region.
+/// \param dst_begin Begin of the destination container region.
+/// \param dst_end End of the destination container region.
+void SANITIZER_CDECL __sanitizer_copy_contiguous_container_annotations(
+ const void *src_begin, const void *src_end, const void *dst_begin,
+ const void *dst_end);
+
/// Returns true if the contiguous container [beg, end) is properly
/// poisoned.
///
diff --git a/compiler-rt/lib/asan/asan_errors.cpp b/compiler-rt/lib/asan/asan_errors.cpp
index 6f2fd28bfdf1..4f112cc5d1bc 100644
--- a/compiler-rt/lib/asan/asan_errors.cpp
+++ b/compiler-rt/lib/asan/asan_errors.cpp
@@ -348,6 +348,20 @@ void ErrorBadParamsToAnnotateDoubleEndedContiguousContainer::Print() {
ReportErrorSummary(scariness.GetDescription(), stack);
}
+void ErrorBadParamsToCopyContiguousContainerAnnotations::Print() {
+ Report(
+ "ERROR: AddressSanitizer: bad parameters to "
+ "__sanitizer_copy_contiguous_container_annotations:\n"
+ " src_storage_beg : %p\n"
+ " src_storage_end : %p\n"
+ " dst_storage_beg : %p\n"
+ " dst_storage_end : %p\n",
+ (void *)old_storage_beg, (void *)old_storage_end, (void *)new_storage_beg,
+ (void *)new_storage_end);
+ stack->Print();
+ ReportErrorSummary(scariness.GetDescription(), stack);
+}
+
void ErrorODRViolation::Print() {
Decorator d;
Printf("%s", d.Error());
diff --git a/compiler-rt/lib/asan/asan_errors.h b/compiler-rt/lib/asan/asan_errors.h
index 634f6da54435..b3af655e6663 100644
--- a/compiler-rt/lib/asan/asan_errors.h
+++ b/compiler-rt/lib/asan/asan_errors.h
@@ -353,6 +353,24 @@ struct ErrorBadParamsToAnnotateDoubleEndedContiguousContainer : ErrorBase {
void Print();
};
+struct ErrorBadParamsToCopyContiguousContainerAnnotations : ErrorBase {
+ const BufferedStackTrace *stack;
+ uptr old_storage_beg, old_storage_end, new_storage_beg, new_storage_end;
+
+ ErrorBadParamsToCopyContiguousContainerAnnotations() = default; // (*)
+ ErrorBadParamsToCopyContiguousContainerAnnotations(
+ u32 tid, BufferedStackTrace *stack_, uptr old_storage_beg_,
+ uptr old_storage_end_, uptr new_storage_beg_, uptr new_storage_end_)
+ : ErrorBase(tid, 10,
+ "bad-__sanitizer_copy_contiguous_container_annotations"),
+ stack(stack_),
+ old_storage_beg(old_storage_beg_),
+ old_storage_end(old_storage_end_),
+ new_storage_beg(new_storage_beg_),
+ new_storage_end(new_storage_end_) {}
+ void Print();
+};
+
struct ErrorODRViolation : ErrorBase {
__asan_global global1, global2;
u32 stack_id1, stack_id2;
@@ -421,6 +439,7 @@ struct ErrorGeneric : ErrorBase {
macro(StringFunctionSizeOverflow) \
macro(BadParamsToAnnotateContiguousContainer) \
macro(BadParamsToAnnotateDoubleEndedContiguousContainer) \
+ macro(BadParamsToCopyContiguousContainerAnnotations) \
macro(ODRViolation) \
macro(InvalidPointerPair) \
macro(Generic)
diff --git a/compiler-rt/lib/asan/asan_poisoning.cpp b/compiler-rt/lib/asan/asan_poisoning.cpp
index d600b1a0c241..762670632f4e 100644
--- a/compiler-rt/lib/asan/asan_poisoning.cpp
+++ b/compiler-rt/lib/asan/asan_poisoning.cpp
@@ -16,6 +16,7 @@
#include "asan_report.h"
#include "asan_stack.h"
#include "sanitizer_common/sanitizer_atomic.h"
+#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_flags.h"
#include "sanitizer_common/sanitizer_interface_internal.h"
#include "sanitizer_common/sanitizer_libc.h"
@@ -576,6 +577,185 @@ void __sanitizer_annotate_double_ended_contiguous_container(
}
}
+// Marks the specified number of bytes in a granule as accessible or
+// poisons the whole granule with kAsanContiguousContainerOOBMagic value.
+static void SetContainerGranule(uptr ptr, u8 n) {
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+ u8 s = (n == granularity) ? 0 : (n ? n : kAsanContiguousContainerOOBMagic);
+ *(u8 *)MemToShadow(ptr) = s;
+}
+
+// Performs a byte-by-byte copy of ASan annotations (shadow memory values).
+// Result may be different due to ASan limitations, but result cannot lead
+// to false positives (more memory than requested may get unpoisoned).
+static void SlowCopyContainerAnnotations(uptr src_beg, uptr src_end,
+ uptr dst_beg, uptr dst_end) {
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+ uptr dst_end_down = RoundDownTo(dst_end, granularity);
+ uptr src_ptr = src_beg;
+ uptr dst_ptr = dst_beg;
+
+ while (dst_ptr < dst_end) {
+ uptr granule_beg = RoundDownTo(dst_ptr, granularity);
+ uptr granule_end = granule_beg + granularity;
+ uptr unpoisoned_bytes = 0;
+
+ uptr end = Min(granule_end, dst_end);
+ for (; dst_ptr != end; ++dst_ptr, ++src_ptr)
+ if (!AddressIsPoisoned(src_ptr))
+ unpoisoned_bytes = dst_ptr - granule_beg + 1;
+
+ if (dst_ptr == dst_end && dst_end != dst_end_down &&
+ !AddressIsPoisoned(dst_end))
+ continue;
+
+ if (unpoisoned_bytes != 0 || granule_beg >= dst_beg)
+ SetContainerGranule(granule_beg, unpoisoned_bytes);
+ else if (!AddressIsPoisoned(dst_beg))
+ SetContainerGranule(granule_beg, dst_beg - granule_beg);
+ }
+}
+
+// Performs a byte-by-byte copy of ASan annotations (shadow memory values),
+// going through bytes in reversed order, but not reversing annotations.
+// Result may be different due to ASan limitations, but result cannot lead
+// to false positives (more memory than requested may get unpoisoned).
+static void SlowReversedCopyContainerAnnotations(uptr src_beg, uptr src_end,
+ uptr dst_beg, uptr dst_end) {
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+ uptr dst_end_down = RoundDownTo(dst_end, granularity);
+ uptr src_ptr = src_end;
+ uptr dst_ptr = dst_end;
+
+ while (dst_ptr > dst_beg) {
+ uptr granule_beg = RoundDownTo(dst_ptr - 1, granularity);
+ uptr unpoisoned_bytes = 0;
+
+ uptr end = Max(granule_beg, dst_beg);
+ for (; dst_ptr != end; --dst_ptr, --src_ptr)
+ if (unpoisoned_bytes == 0 && !AddressIsPoisoned(src_ptr - 1))
+ unpoisoned_bytes = dst_ptr - granule_beg;
+
+ if (dst_ptr >= dst_end_down && !AddressIsPoisoned(dst_end))
+ continue;
+
+ if (granule_beg == dst_ptr || unpoisoned_bytes != 0)
+ SetContainerGranule(granule_beg, unpoisoned_bytes);
+ else if (!AddressIsPoisoned(dst_beg))
+ SetContainerGranule(granule_beg, dst_beg - granule_beg);
+ }
+}
+
+// A helper function for __sanitizer_copy_contiguous_container_annotations,
+// has assumption about begin and end of the container.
+// Should not be used stand alone.
+static void CopyContainerFirstGranuleAnnotation(uptr src_beg, uptr dst_beg) {
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+ // First granule
+ uptr src_beg_down = RoundDownTo(src_beg, granularity);
+ uptr dst_beg_down = RoundDownTo(dst_beg, granularity);
+ if (dst_beg_down == dst_beg)
+ return;
+ if (!AddressIsPoisoned(src_beg))
+ *(u8 *)MemToShadow(dst_beg_down) = *(u8 *)MemToShadow(src_beg_down);
+ else if (!AddressIsPoisoned(dst_beg))
+ SetContainerGranule(dst_beg_down, dst_beg - dst_beg_down);
+}
+
+// A helper function for __sanitizer_copy_contiguous_container_annotations,
+// has assumption about begin and end of the container.
+// Should not be used stand alone.
+static void CopyContainerLastGranuleAnnotation(uptr src_end, uptr dst_end) {
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+ // Last granule
+ uptr src_end_down = RoundDownTo(src_end, granularity);
+ uptr dst_end_down = RoundDownTo(dst_end, granularity);
+ if (dst_end_down == dst_end || !AddressIsPoisoned(dst_end))
+ return;
+ if (AddressIsPoisoned(src_end))
+ *(u8 *)MemToShadow(dst_end_down) = *(u8 *)MemToShadow(src_end_down);
+ else
+ SetContainerGranule(dst_end_down, src_end - src_end_down);
+}
+
+// This function copies ASan memory annotations (poisoned/unpoisoned states)
+// from one buffer to another.
+// Its main purpose is to help with relocating trivially relocatable objects,
+// which memory may be poisoned, without calling copy constructor.
+// However, it does not move memory content itself, only annotations.
+// If the buffers aren't aligned (the distance between buffers isn't
+// granule-aligned)
+// // src_beg % granularity != dst_beg % granularity
+// the function handles this by going byte by byte, slowing down performance.
+// The old buffer annotations are not removed. If necessary,
+// user can unpoison old buffer with __asan_unpoison_memory_region.
+void __sanitizer_copy_contiguous_container_annotations(const void *src_beg_p,
+ const void *src_end_p,
+ const void *dst_beg_p,
+ const void *dst_end_p) {
+ if (!flags()->detect_container_overflow)
+ return;
+
+ VPrintf(3, "contiguous_container_src: %p %p\n", src_beg_p, src_end_p);
+ VPrintf(3, "contiguous_container_dst: %p %p\n", dst_beg_p, dst_end_p);
+
+ uptr src_beg = reinterpret_cast<uptr>(src_beg_p);
+ uptr src_end = reinterpret_cast<uptr>(src_end_p);
+ uptr dst_beg = reinterpret_cast<uptr>(dst_beg_p);
+ uptr dst_end = reinterpret_cast<uptr>(dst_end_p);
+
+ constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+
+ if (src_beg > src_end || (dst_end - dst_beg) != (src_end - src_beg)) {
+ GET_STACK_TRACE_FATAL_HERE;
+ ReportBadParamsToCopyContiguousContainerAnnotations(
+ src_beg, src_end, dst_beg, dst_end, &stack);
+ }
+
+ if (src_beg == src_end || src_beg == dst_beg)
+ return;
+ // Due to support for overlapping buffers, we may have to copy elements
+ // in reversed order, when destination buffer starts in the middle of
+ // the source buffer (or shares first granule with it).
+ //
+ // When buffers are not granule-aligned (or distance between them,
+// to be specific), annotations have to be copied byte by byte.
+ //
+ // The only remaining edge cases involve edge granules,
+ // when the container starts or ends within a granule.
+ uptr src_beg_up = RoundUpTo(src_beg, granularity);
+ uptr src_end_up = RoundUpTo(src_end, granularity);
+ bool copy_in_reversed_order = src_beg < dst_beg && dst_beg <= src_end_up;
+ if (src_beg % granularity != dst_beg % granularity ||
+ RoundDownTo(dst_end - 1, granularity) <= dst_beg) {
+ if (copy_in_reversed_order)
+ SlowReversedCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
+ else
+ SlowCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
+ return;
+ }
+
+ // As buffers are granule-aligned, we can just copy annotations of granules
+ // from the middle.
+ uptr dst_beg_up = RoundUpTo(dst_beg, granularity);
+ uptr dst_end_down = RoundDownTo(dst_end, granularity);
+ if (copy_in_reversed_order)
+ CopyContainerLastGranuleAnnotation(src_end, dst_end);
+ else
+ CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
+
+ if (dst_beg_up < dst_end_down) {
+ internal_memmove((u8 *)MemToShadow(dst_beg_up),
+ (u8 *)MemToShadow(src_beg_up),
+ (dst_end_down - dst_beg_up) / granularity);
+ }
+
+ if (copy_in_reversed_order)
+ CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
+ else
+ CopyContainerLastGranuleAnnotation(src_end, dst_end);
+}
+
static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {
CHECK_LE(begin, end);
constexpr uptr kMaxRangeToCheck = 32;
diff --git a/compiler-rt/lib/asan/asan_report.cpp b/compiler-rt/lib/asan/asan_report.cpp
index fd590e401f67..45aa607dcda0 100644
--- a/compiler-rt/lib/asan/asan_report.cpp
+++ b/compiler-rt/lib/asan/asan_report.cpp
@@ -367,6 +367,16 @@ void ReportBadParamsToAnnotateDoubleEndedContiguousContainer(
in_report.ReportError(error);
}
+void ReportBadParamsToCopyContiguousContainerAnnotations(
+ uptr old_storage_beg, uptr old_storage_end, uptr new_storage_beg,
+ uptr new_storage_end, BufferedStackTrace *stack) {
+ ScopedInErrorReport in_report;
+ ErrorBadParamsToCopyContiguousContainerAnnotations error(
+ GetCurrentTidOrInvalid(), stack, old_storage_beg, old_storage_end,
+ new_storage_beg, new_storage_end);
+ in_report.ReportError(error);
+}
+
void ReportODRViolation(const __asan_global *g1, u32 stack_id1,
const __asan_global *g2, u32 stack_id2) {
ScopedInErrorReport in_report;
diff --git a/compiler-rt/lib/asan/asan_report.h b/compiler-rt/lib/asan/asan_report.h
index 3540b3b4b1bf..3143d83abe39 100644
--- a/compiler-rt/lib/asan/asan_report.h
+++ b/compiler-rt/lib/asan/asan_report.h
@@ -88,6 +88,9 @@ void ReportBadParamsToAnnotateDoubleEndedContiguousContainer(
uptr storage_beg, uptr storage_end, uptr old_container_beg,
uptr old_container_end, uptr new_container_beg, uptr new_container_end,
BufferedStackTrace *stack);
+void ReportBadParamsToCopyContiguousContainerAnnotations(
+ uptr old_storage_beg, uptr old_storage_end, uptr new_storage_beg,
+ uptr new_storage_end, BufferedStackTrace *stack);
void ReportODRViolation(const __asan_global *g1, u32 stack_id1,
const __asan_global *g2, u32 stack_id2);
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_common_interface.inc b/compiler-rt/lib/sanitizer_common/sanitizer_common_interface.inc
index 66744aa021e6..4ea75cdd67cb 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_common_interface.inc
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_common_interface.inc
@@ -10,6 +10,7 @@
INTERFACE_FUNCTION(__sanitizer_acquire_crash_state)
INTERFACE_FUNCTION(__sanitizer_annotate_contiguous_container)
INTERFACE_FUNCTION(__sanitizer_annotate_double_ended_contiguous_container)
+INTERFACE_FUNCTION(__sanitizer_copy_contiguous_container_annotations)
INTERFACE_FUNCTION(__sanitizer_contiguous_container_find_bad_address)
INTERFACE_FUNCTION(
__sanitizer_double_ended_contiguous_container_find_bad_address)
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_interface_internal.h b/compiler-rt/lib/sanitizer_common/sanitizer_interface_internal.h
index c424ab1cecf9..387a4d87d97b 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_interface_internal.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_interface_internal.h
@@ -76,6 +76,11 @@ void __sanitizer_annotate_double_ended_contiguous_container(
const void *old_container_beg, const void *old_container_end,
const void *new_container_beg, const void *new_container_end);
SANITIZER_INTERFACE_ATTRIBUTE
+void __sanitizer_copy_contiguous_container_annotations(const void *src_begin,
+ const void *src_end,
+ const void *dst_begin,
+ const void *dst_end);
+SANITIZER_INTERFACE_ATTRIBUTE
int __sanitizer_verify_contiguous_container(const void *beg, const void *mid,
const void *end);
SANITIZER_INTERFACE_ATTRIBUTE
diff --git a/compiler-rt/test/asan/TestCases/copy_container_annotations.cpp b/compiler-rt/test/asan/TestCases/copy_container_annotations.cpp
new file mode 100644
index 000000000000..ed20dc3e80d4
--- /dev/null
+++ b/compiler-rt/test/asan/TestCases/copy_container_annotations.cpp
@@ -0,0 +1,172 @@
+// RUN: %clangxx_asan -fexceptions -O %s -o %t && %env_asan_opts=detect_stack_use_after_return=0 %run %t
+//
+// Test __sanitizer_copy_contiguous_container_annotations.
+
+#include <algorithm>
+#include <cstddef>
+#include <iostream>
+#include <memory>
+#include <vector>
+
+#include <assert.h>
+#include <sanitizer/asan_interface.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+static constexpr size_t kGranularity = 8;
+
+template <class T> static constexpr T RoundDown(T x) {
+ return reinterpret_cast<T>(reinterpret_cast<uintptr_t>(x) &
+ ~(kGranularity - 1));
+}
+template <class T> static constexpr T RoundUp(T x) {
+ return reinterpret_cast<T>(
+ RoundDown(reinterpret_cast<uintptr_t>(x) + kGranularity - 1));
+}
+
+static std::vector<int> GetPoisonedState(char *begin, char *end) {
+ std::vector<int> result;
+ for (char *ptr = begin; ptr != end; ++ptr) {
+ result.push_back(__asan_address_is_poisoned(ptr));
+ }
+ return result;
+}
+
+static void RandomPoison(char *beg, char *end) {
+ assert(beg == RoundDown(beg));
+ assert(end == RoundDown(end));
+ __asan_poison_memory_region(beg, end - beg);
+ for (beg = RoundUp(beg); beg < end; beg += kGranularity) {
+ __asan_unpoison_memory_region(beg, rand() % (kGranularity + 1));
+ }
+}
+
+template <bool benchmark>
+static void Test(size_t capacity, size_t off_src, size_t off_dst,
+ char *src_buffer_beg, char *src_buffer_end,
+ char *dst_buffer_beg, char *dst_buffer_end) {
+ size_t dst_buffer_size = dst_buffer_end - dst_buffer_beg;
+ char *src_beg = src_buffer_beg + off_src;
+ char *src_end = src_beg + capacity;
+
+ char *dst_beg = dst_buffer_beg + off_dst;
+ char *dst_end = dst_beg + capacity;
+ if (benchmark) {
+ __sanitizer_copy_contiguous_container_annotations(src_beg, src_end, dst_beg,
+ dst_end);
+ return;
+ }
+
+ std::vector<int> src_poison_states =
+ GetPoisonedState(src_buffer_beg, src_buffer_end);
+ std::vector<int> dst_poison_before =
+ GetPoisonedState(dst_buffer_beg, dst_buffer_end);
+ __sanitizer_copy_contiguous_container_annotations(src_beg, src_end, dst_beg,
+ dst_end);
+ std::vector<int> dst_poison_after =
+ GetPoisonedState(dst_buffer_beg, dst_buffer_end);
+
+ // Create ideal copy of src over dst.
+ std::vector<int> dst_poison_exp = dst_poison_before;
+ for (size_t cur = 0; cur < capacity; ++cur)
+ dst_poison_exp[off_dst + cur] = src_poison_states[off_src + cur];
+
+ // Unpoison prefixes of Asan granules.
+ for (size_t cur = dst_buffer_size - 1; cur > 0; --cur) {
+ if (cur % kGranularity != 0 && !dst_poison_exp[cur])
+ dst_poison_exp[cur - 1] = 0;
+ }
+
+ if (dst_poison_after != dst_poison_exp) {
+ std::cerr << "[" << off_dst << ", " << off_dst + capacity << ")\n";
+ for (size_t i = 0; i < dst_poison_after.size(); ++i) {
+ std::cerr << i << ":\t" << dst_poison_before[i] << "\t"
+ << dst_poison_after[i] << "\t" << dst_poison_exp[i] << "\n";
+ }
+ std::cerr << "----------\n";
+
+ assert(dst_poison_after == dst_poison_exp);
+ }
+}
+
+template <bool benchmark>
+static void TestNonOverlappingContainers(size_t capacity, size_t off_src,
+ size_t off_dst) {
+ // Test will copy [off_src, off_src + capacity) to [off_dst, off_dst + capacity).
+ // Allocate buffers to have additional granule before and after tested ranges.
+ off_src += kGranularity;
+ off_dst += kGranularity;
+ size_t src_buffer_size = RoundUp(off_src + capacity) + kGranularity;
+ size_t dst_buffer_size = RoundUp(off_dst + capacity) + kGranularity;
+
+ std::unique_ptr<char[]> src_buffer =
+ std::make_unique<char[]>(src_buffer_size);
+ std::unique_ptr<char[]> dst_buffer =
+ std::make_unique<char[]>(dst_buffer_size);
+
+ char *src_buffer_beg = src_buffer.get();
+ char *src_buffer_end = src_buffer_beg + src_buffer_size;
+ assert(RoundDown(src_buffer_beg) == src_buffer_beg);
+
+ char *dst_buffer_beg = dst_buffer.get();
+ char *dst_buffer_end = dst_buffer_beg + dst_buffer_size;
+ assert(RoundDown(dst_buffer_beg) == dst_buffer_beg);
+
+ for (int i = 0; i < 35; i++) {
+ if (!benchmark || !i) {
+ RandomPoison(src_buffer_beg, src_buffer_end);
+ RandomPoison(dst_buffer_beg, dst_buffer_end);
+ }
+
+ Test<benchmark>(capacity, off_src, off_dst, src_buffer_beg, src_buffer_end,
+ dst_buffer_beg, dst_buffer_end);
+ }
+
+ __asan_unpoison_memory_region(src_buffer_beg, src_buffer_size);
+ __asan_unpoison_memory_region(dst_buffer_beg, dst_buffer_size);
+}
+
+template <bool benchmark>
+static void TestOverlappingContainers(size_t capacity, size_t off_src,
+ size_t off_dst) {
+ // Test will copy [off_src, off_src + capacity) to [off_dst, off_dst + capacity).
+ // Allocate buffers to have additional granule before and after tested ranges.
+ off_src += kGranularity;
+ off_dst += kGranularity;
+ size_t buffer_size =
+ RoundUp(std::max(off_src, off_dst) + capacity) + kGranularity;
+
+ // Use unique_ptr with a custom deleter to manage the buffer
+ std::unique_ptr<char[]> buffer = std::make_unique<char[]>(buffer_size);
+
+ char *buffer_beg = buffer.get();
+ char *buffer_end = buffer_beg + buffer_size;
+ assert(RoundDown(buffer_beg) == buffer_beg);
+
+ for (int i = 0; i < 35; i++) {
+ if (!benchmark || !i)
+ RandomPoison(buffer_beg, buffer_end);
+ Test<benchmark>(capacity, off_src, off_dst, buffer_beg, buffer_end,
+ buffer_beg, buffer_end);
+ }
+
+ __asan_unpoison_memory_region(buffer_beg, buffer_size);
+}
+
+int main(int argc, char **argv) {
+ int n = argc == 1 ? 64 : atoi(argv[1]);
+ for (size_t off_src = 0; off_src < kGranularity; off_src++) {
+ for (size_t off_dst = 0; off_dst < kGranularity; off_dst++) {
+ for (int capacity = 0; capacity <= n; capacity++) {
+ if (n < 1024) {
+ TestNonOverlappingContainers<false>(capacity, off_src, off_dst);
+ TestOverlappingContainers<false>(capacity, off_src, off_dst);
+ } else {
+ TestNonOverlappingContainers<true>(capacity, off_src, off_dst);
+ TestOverlappingContainers<true>(capacity, off_src, off_dst);
+ }
+ }
+ }
+ }
+}
\ No newline at end of file