From 021523661390c13bb3514399faa864a6d1f8cbb3 Mon Sep 17 00:00:00 2001
From: KRM7 <70973547+KRM7@users.noreply.github.com>
Date: Sun, 18 Feb 2024 12:15:11 +0100
Subject: [PATCH] support array types
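
Add support for array types, analogous to std::unique_ptr<T[]>:
small_unique_ptr<T[]> with an unbounded array type can be created with
make_unique_small<T[]>(count), provides operator[] instead of
operator* / operator->, and stores the elements in the inline buffer
when they fit (falling back to heap allocation otherwise, and always
at compile time).

A minimal usage sketch (the element type `Widget` is hypothetical, and
the names are used unqualified as in the test suite):

    struct Widget { int value = 0; };  // hypothetical, nothrow move constructible element

    small_unique_ptr<Widget[]> p = make_unique_small<Widget[]>(3);  // 3 value-initialized Widgets
    p[0].value = 42;                                // operator[] is only enabled for array types
    bool inline_storage = p.is_stack_allocated();   // true only when 3 Widgets fit the inline buffer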
---
.github/workflows/sanitizers.yml | 2 +
core-guidelines.ruleset | 35 -------
src/small_unique_ptr.hpp | 168 +++++++++++++++++++++++--------
test/small_unique_ptr.cpp | 113 ++++++++++++++++++++-
4 files changed, 235 insertions(+), 83 deletions(-)
delete mode 100644 core-guidelines.ruleset
diff --git a/.github/workflows/sanitizers.yml b/.github/workflows/sanitizers.yml
index b36145c..bf6199c 100644
--- a/.github/workflows/sanitizers.yml
+++ b/.github/workflows/sanitizers.yml
@@ -12,6 +12,8 @@ jobs:
include:
- cxx: clang++-15
pkgs: clang-15 llvm-15
+ - cxx: g++-12
+ pkgs: g++-12
env:
ASAN_OPTIONS: check_initialization_order=1:strict_init_order=1:detect_stack_use_after_return=1:detect_leaks=1:detect_invalid_pointer_pairs=2
diff --git a/core-guidelines.ruleset b/core-guidelines.ruleset
deleted file mode 100644
index f379195..0000000
--- a/core-guidelines.ruleset
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/src/small_unique_ptr.hpp b/src/small_unique_ptr.hpp
index 2690e50..1463afd 100644
--- a/src/small_unique_ptr.hpp
+++ b/src/small_unique_ptr.hpp
@@ -55,16 +55,6 @@ namespace detail
inline constexpr bool is_proper_base_of_v = is_proper_base_of<Base, Derived>::value;
- template<typename T, typename = void>
- struct is_complete : std::false_type {};
-
- template<typename T>
- struct is_complete<T, std::void_t<decltype(sizeof(T))>> : std::true_type {};
-
- template<typename T>
- inline constexpr bool is_complete_v = is_complete<T>::value;
-
-
inline constexpr std::size_t small_ptr_size = 64;
@@ -78,6 +68,12 @@ namespace detail
static constexpr std::size_t value = std::has_virtual_destructor_v<T> ? dynamic_buffer_size : static_buffer_size;
};
+ template<typename T>
+ struct buffer_size<T[]>
+ {
+ static constexpr std::size_t value = small_ptr_size - sizeof(T*);
+ };
+
template<typename T>
inline constexpr std::size_t buffer_size_v = buffer_size<T>::value;
@@ -97,12 +93,32 @@ namespace detail
template<typename T>
- struct is_always_heap_allocated
+ struct buffer_elements {};
+
+ template<typename T>
+ struct buffer_elements<T[]>
+ {
+ static constexpr std::size_t value = buffer_size_v<T[]> / sizeof(T);
+ };
+
+ template<typename T>
+ inline constexpr std::size_t buffer_elements_v = buffer_elements<T>::value;
+
+
+ template<typename T>
+ struct is_always_heap_allocated // TODO: cleanup?
{
static constexpr bool value = (sizeof(T) > buffer_size_v<T>) || (alignof(T) > buffer_alignment_v<T>) ||
(!std::is_abstract_v<T> && !std::is_nothrow_move_constructible_v<std::remove_cv_t<T>>);
};
+ template<typename T>
+ struct is_always_heap_allocated<T[]>
+ {
+ static constexpr bool value = (sizeof(T) > buffer_size_v<T[]>) || (alignof(T) > buffer_alignment_v<T[]>) ||
+ !std::is_nothrow_move_constructible_v<std::remove_cv_t<T>>;
+ };
+
template<typename T>
inline constexpr bool is_always_heap_allocated_v = is_always_heap_allocated<T>::value;
@@ -112,17 +128,17 @@ namespace detail
{
using pointer = std::remove_cv_t<T>*;
using buffer_t = unsigned char[buffer_size_v<T>];
- using move_fn = void(*)(void* src, void* dst) noexcept;
+ using move_fn = void(*)(void*, void*) noexcept;
pointer buffer(std::ptrdiff_t offset = 0) const noexcept
{
- return std::launder(reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_) + offset));
+ return reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_) + offset);
}
template<typename U>
void move_buffer_to(small_unique_ptr_base<U>& dst) noexcept
{
- move_(buffer(), dst.buffer());
+ move_(std::launder(buffer()), dst.buffer());
dst.move_ = move_;
}
@@ -131,7 +147,7 @@ namespace detail
return static_cast<bool>(move_);
}
- alignas(buffer_alignment_v<T>) mutable buffer_t buffer_;
+ alignas(buffer_alignment_v<T>) mutable buffer_t buffer_ = {};
T* data_ = nullptr;
move_fn move_ = nullptr;
};
@@ -142,25 +158,25 @@ namespace detail
{
static constexpr bool is_stack_allocated() noexcept { return false; }
- T* data_ = nullptr;
+ std::remove_extent_t<T>* data_ = nullptr;
};
template<typename T>
- requires(!is_always_heap_allocated_v<T> && !std::is_polymorphic_v<T>)
+ requires(!is_always_heap_allocated_v<T> && !std::is_polymorphic_v<T> && !std::is_array_v<T>)
struct small_unique_ptr_base<T>
{
using pointer = std::remove_cv_t<T>*;
using buffer_t = unsigned char[buffer_size_v<T>];
- pointer buffer(std::ptrdiff_t offset = 0) const noexcept
+ pointer buffer(std::ptrdiff_t = 0) const noexcept
{
- return std::launder(reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_) + offset));
+ return reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_));
}
template<typename U>
void move_buffer_to(small_unique_ptr_base<U>& dst) noexcept
{
- std::construct_at(dst.buffer(), std::move(*buffer()));
+ std::construct_at(dst.buffer(), std::move(*std::launder(buffer())));
}
constexpr bool is_stack_allocated() const noexcept
@@ -168,7 +184,7 @@ namespace detail
return !std::is_constant_evaluated() && (data_ == buffer());
}
- alignas(buffer_alignment_v<T>) mutable buffer_t buffer_;
+ alignas(buffer_alignment_v<T>) mutable buffer_t buffer_ = {};
T* data_ = nullptr;
};
@@ -181,7 +197,7 @@ namespace detail
pointer buffer(std::ptrdiff_t offset = 0) const noexcept
{
- return std::launder(reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_) + offset));
+ return reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_) + offset);
}
template<typename U>
@@ -196,19 +212,46 @@ namespace detail
{
if (std::is_constant_evaluated()) return false;
- const volatile unsigned char* data = reinterpret_cast<const volatile unsigned char*>(data_);
- const volatile unsigned char* buffer_first = static_cast<const volatile unsigned char*>(buffer_);
- const volatile unsigned char* buffer_last = buffer_first + buffer_size_v<T>;
+ auto* data = reinterpret_cast<const volatile unsigned char*>(data_);
+ auto* buffer_first = static_cast<const volatile unsigned char*>(buffer_);
+ auto* buffer_last = buffer_first + buffer_size_v<T>;
assert(reinterpret_cast<std::uintptr_t>(buffer_last) - reinterpret_cast<std::uintptr_t>(buffer_first) == buffer_size_v<T>);
return std::less_equal{}(buffer_first, data) && std::less{}(data, buffer_last);
}
- alignas(buffer_alignment_v<T>) mutable buffer_t buffer_;
+ alignas(buffer_alignment_v<T>) mutable buffer_t buffer_ = {};
T* data_ = nullptr;
};
+ template<typename T>
+ requires(!is_always_heap_allocated_v<T> && std::is_array_v<T>)
+ struct small_unique_ptr_base<T>
+ {
+ using pointer = std::remove_cv_t<std::remove_extent_t<T>>*;
+ using buffer_t = unsigned char[buffer_size_v<T>];
+
+ pointer buffer(std::ptrdiff_t = 0) const noexcept
+ {
+ return reinterpret_cast<pointer>(static_cast<unsigned char*>(buffer_));
+ }
+
+ template<typename U>
+ void move_buffer_to(small_unique_ptr_base<U>& dst) noexcept
+ {
+ std::uninitialized_move(std::launder(buffer()), buffer() + buffer_elements_v<T>, dst.buffer());
+ }
+
+ constexpr bool is_stack_allocated() const noexcept
+ {
+ return !std::is_constant_evaluated() && (data_ == buffer());
+ }
+
+ alignas(buffer_alignment_v<T>) mutable buffer_t buffer_ = {};
+ std::remove_extent_t<T>* data_ = nullptr;
+ };
+
struct make_unique_small_impl;
} // namespace detail
@@ -218,11 +261,11 @@ template<typename T>
class small_unique_ptr : private detail::small_unique_ptr_base<T>
{
public:
- static_assert(detail::is_complete_v<T> && !std::is_array_v<T>);
+ static_assert(!std::is_bounded_array_v<T>);
- using element_type = T;
- using pointer = T*;
- using reference = T&;
+ using element_type = std::remove_extent_t<T>;
+ using pointer = std::remove_extent_t<T>*;
+ using reference = std::remove_extent_t<T>&;
struct constructor_tag_t {};
@@ -246,7 +289,7 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
if constexpr (!detail::is_always_heap_allocated_v<U>) // other.is_stack_allocated()
{
other.move_buffer_to(*this);
- this->data_ = this->buffer(other.template offsetof_base<T>());
+ this->data_ = std::launder(this->buffer(other.template offsetof_base<T>()));
other.reset();
}
}
@@ -272,7 +315,7 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
{
reset();
other.move_buffer_to(*this);
- this->data_ = this->buffer(other.template offsetof_base<T>());
+ this->data_ = std::launder(this->buffer(other.template offsetof_base<T>()));
other.reset();
}
return *this;
@@ -286,12 +329,12 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
constexpr ~small_unique_ptr() noexcept
{
- is_stack_allocated() ? std::destroy_at(this->data_) : delete this->data_;
+ destroy();
}
constexpr void reset(pointer new_data = pointer{}) noexcept
{
- is_stack_allocated() ? std::destroy_at(this->data_) : delete this->data_;
+ destroy();
if constexpr (requires { small_unique_ptr::move_; }) this->move_ = nullptr;
this->data_ = new_data;
}
@@ -314,26 +357,26 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
detail::small_unique_ptr_base<T> temp;
other.move_buffer_to(temp);
- temp.data_ = temp.buffer(other_offset);
+ temp.data_ = std::launder(temp.buffer(other_offset));
std::destroy_at(other.data_);
this->move_buffer_to(other);
- other.data_ = other.buffer(this_offset);
+ other.data_ = std::launder(other.buffer(this_offset));
std::destroy_at(this->data_);
temp.move_buffer_to(*this);
- this->data_ = this->buffer(other_offset);
+ this->data_ = std::launder(this->buffer(other_offset));
std::destroy_at(temp.data_);
}
else if (!is_stack_allocated() && other.is_stack_allocated())
{
- const pointer new_data = this->buffer(other.offsetof_base());
+ const pointer new_data = std::launder(this->buffer(other.offsetof_base()));
other.move_buffer_to(*this);
other.reset(std::exchange(this->data_, new_data));
}
else /* if (is_stack_allocated() && !other.is_stack_allocated()) */
{
- const pointer new_data = other.buffer(this->offsetof_base());
+ const pointer new_data = std::launder(other.buffer(this->offsetof_base()));
this->move_buffer_to(other);
this->reset(std::exchange(other.data_, new_data));
}
@@ -374,19 +417,26 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
}
[[nodiscard]]
- constexpr reference operator*() const noexcept(detail::is_nothrow_dereferenceable_v<T*>)
+ constexpr reference operator*() const noexcept(detail::is_nothrow_dereferenceable_v<T*>) requires(!std::is_array_v<T>)
{
assert(this->data_);
return *this->data_;
}
[[nodiscard]]
- constexpr pointer operator->() const noexcept
+ constexpr pointer operator->() const noexcept requires(!std::is_array_v<T>)
{
assert(this->data_);
return this->data_;
}
+ [[nodiscard]]
+ constexpr reference operator[](std::size_t idx) const requires(std::is_array_v<T>)
+ {
+ assert(this->data_);
+ return this->data_[idx];
+ }
+
constexpr bool operator==(std::nullptr_t) const noexcept
{
return this->data_ == pointer{ nullptr };
@@ -427,11 +477,23 @@ class small_unique_ptr : private detail::small_unique_ptr_base<T>
if (!is_stack_allocated()) return 0;
const auto derived_ptr = reinterpret_cast<const volatile unsigned char*>(this->buffer());
- const auto base_ptr = reinterpret_cast<const volatile unsigned char*>(static_cast<const volatile U*>(this->data_));
+ const auto base_ptr = reinterpret_cast<const volatile unsigned char*>(static_cast<const volatile std::remove_extent_t<U>*>(this->data_)); // TODO: ugly code
return base_ptr - derived_ptr;
}
+ constexpr void destroy() noexcept // TODO: cleanup?
+ {
+ if constexpr (!std::is_array_v<T>)
+ {
+ is_stack_allocated() ? std::destroy_at(this->data_) : delete this->data_;
+ }
+ else
+ {
+ is_stack_allocated() ? std::destroy(this->data_, this->data_ + detail::buffer_elements_v<T>) : delete[] this->data_;
+ }
+ }
+
template<typename U>
friend class small_unique_ptr;
@@ -456,6 +518,7 @@ namespace detail
struct make_unique_small_impl
{
template<typename T, typename... Args>
+ requires(!std::is_array_v<T>)
static constexpr small_unique_ptr<T> invoke(Args&&... args)
noexcept(std::is_nothrow_constructible_v<T, Args...> && !detail::is_always_heap_allocated_v<T>)
{
@@ -477,12 +540,31 @@ namespace detail
return ptr;
}
+
+ template<typename T>
+ requires(std::is_unbounded_array_v<T>)
+ static constexpr small_unique_ptr<T> invoke(std::size_t count) // TODO: think about what happens if count == 0
+ {
+ small_unique_ptr<T> ptr;
+
+ if (detail::is_always_heap_allocated_v<T> || (count > detail::buffer_elements_v<T>) || std::is_constant_evaluated())
+ {
+ ptr.data_ = new std::remove_extent_t<T>[count](); // TODO: with () this isn't constexpr under msvc?
+ }
+ else if constexpr (!detail::is_always_heap_allocated_v<T>)
+ {
+ std::uninitialized_value_construct(ptr.buffer(), ptr.buffer() + detail::buffer_elements_v<T>);
+ ptr.data_ = ptr.buffer();
+ }
+
+ return ptr;
+ }
};
} // namespace detail
template<typename T, typename... Args>
-[[nodiscard]] constexpr small_unique_ptr<T> make_unique_small(Args&&... args)
+[[nodiscard]] constexpr small_unique_ptr<T> make_unique_small(Args&&... args) // TODO: add array overload?
noexcept(std::is_nothrow_constructible_v<T, Args...> && !detail::is_always_heap_allocated_v<T>)
{
return detail::make_unique_small_impl::invoke<T>(std::forward<Args>(args)...);
diff --git a/test/small_unique_ptr.cpp b/test/small_unique_ptr.cpp
index 724d750..eb14b49 100644
--- a/test/small_unique_ptr.cpp
+++ b/test/small_unique_ptr.cpp
@@ -78,6 +78,9 @@ TEST_CASE("object_layout", "[small_unique_ptr]")
STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<SmallPOD>>);
STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<LargePOD>>);
+ STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<SmallPOD[]>>);
+ STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<LargePOD[]>>);
+
STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<Base>>);
STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<SmallDerived>>);
STATIC_REQUIRE(std::is_standard_layout_v<small_unique_ptr<LargeDerived>>);
@@ -96,6 +99,13 @@ TEST_CASE("object_size", "[small_unique_ptr]")
STATIC_REQUIRE(alignof(small_unique_ptr<LargePOD>) == alignof(void*));
+ STATIC_REQUIRE(sizeof(small_unique_ptr<SmallPOD[]>) == detail::small_ptr_size);
+ STATIC_REQUIRE(sizeof(small_unique_ptr<LargePOD[]>) == sizeof(void*));
+
+ STATIC_REQUIRE(alignof(small_unique_ptr<SmallPOD[]>) == alignof(void*));
+ STATIC_REQUIRE(alignof(small_unique_ptr<LargePOD[]>) == alignof(void*));
+
+
STATIC_REQUIRE(sizeof(small_unique_ptr<Base>) == detail::small_ptr_size);
STATIC_REQUIRE(sizeof(small_unique_ptr<LargeDerived>) == sizeof(void*));
@@ -115,6 +125,9 @@ TEST_CASE("stack_buffer_size", "[small_unique_ptr]")
STATIC_REQUIRE(small_unique_ptr<SmallPOD>::stack_buffer_size() == sizeof(SmallPOD));
STATIC_REQUIRE(small_unique_ptr<LargePOD>::stack_buffer_size() == 0);
+ STATIC_REQUIRE(small_unique_ptr<SmallPOD[]>::stack_buffer_size() != 0);
+ STATIC_REQUIRE(small_unique_ptr<LargePOD[]>::stack_buffer_size() == 0);
+
STATIC_REQUIRE(small_unique_ptr<LargeDerived>::stack_buffer_size() == 0);
STATIC_REQUIRE(small_unique_ptr<LargeIntrusive>::stack_buffer_size() == 0);
@@ -122,12 +135,14 @@ TEST_CASE("stack_buffer_size", "[small_unique_ptr]")
}
TEST_CASE("stack_buffer_size_archdep", "[small_unique_ptr][!mayfail]")
-{
+{
REQUIRE(small_unique_ptr<SmallDerived>::stack_buffer_size() == 48);
REQUIRE(small_unique_ptr<SmallIntrusive>::stack_buffer_size() == 56);
+
+ REQUIRE(small_unique_ptr<SmallPOD[]>::stack_buffer_size() == 56);
}
-TEMPLATE_TEST_CASE("construction", "[small_unique_ptr]", SmallPOD, LargePOD, Base, SmallDerived, LargeDerived, BaseIntrusive, SmallIntrusive, LargeIntrusive)
+TEMPLATE_TEST_CASE("construction_scalar", "[small_unique_ptr]", SmallPOD, LargePOD, Base, SmallDerived, LargeDerived, BaseIntrusive, SmallIntrusive, LargeIntrusive)
{
STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<TestType>(); return true; }) );
STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<const TestType>(); return true; }) );
@@ -143,6 +158,24 @@ TEMPLATE_TEST_CASE("construction", "[small_unique_ptr]", SmallPOD, LargePOD, Bas
SUCCEED();
}
+TEMPLATE_TEST_CASE("construction_array", "[small_unique_ptr]", SmallPOD, LargePOD)
+{
+ STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<TestType[]>(); return true; }) );
+ STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<const TestType[]>(); return true; }) );
+ STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<TestType[]>(nullptr); return true; }) );
+ STATIC_REQUIRE( std::invoke([]{ (void) small_unique_ptr<const TestType[]>(nullptr); return true; }) );
+
+ STATIC_REQUIRE( std::invoke([]{ (void) make_unique_small<TestType[]>(2); return true; }));
+ STATIC_REQUIRE( std::invoke([]{ (void) make_unique_small<const TestType[]>(2); return true; }));
+
+ (void) make_unique_small<TestType[]>(2);
+ (void) make_unique_small<const TestType[]>(2);
+
+ (void) make_unique_small<TestType[]>(0);
+
+ SUCCEED();
+}
+
TEST_CASE("noexcept_construction", "[small_unique_ptr]")
{
STATIC_REQUIRE(noexcept(make_unique_small<SmallPOD>()));
@@ -154,6 +187,9 @@ TEST_CASE("is_always_heap_allocated", "[small_unique_ptr]")
STATIC_REQUIRE(!small_unique_ptr<SmallPOD>::is_always_heap_allocated());
STATIC_REQUIRE(small_unique_ptr<LargePOD>::is_always_heap_allocated());
+ STATIC_REQUIRE(!small_unique_ptr<SmallPOD[]>::is_always_heap_allocated());
+ STATIC_REQUIRE(small_unique_ptr<LargePOD[]>::is_always_heap_allocated());
+
STATIC_REQUIRE(!small_unique_ptr<SmallDerived>::is_always_heap_allocated());
STATIC_REQUIRE(small_unique_ptr<LargeDerived>::is_always_heap_allocated());
@@ -172,6 +208,9 @@ TEST_CASE("is_stack_allocated", "[small_unique_ptr]")
STATIC_REQUIRE( !std::invoke([]{ return make_unique_small<SmallPOD>().is_stack_allocated(); }) );
STATIC_REQUIRE( !std::invoke([]{ return make_unique_small<SmallDerived>().is_stack_allocated(); }) );
+ STATIC_REQUIRE( !std::invoke([] { return make_unique_small<SmallPOD[]>(2).is_stack_allocated(); }) );
+ STATIC_REQUIRE( !std::invoke([] { return make_unique_small<LargePOD[]>(2).is_stack_allocated(); }) );
+
small_unique_ptr<SmallPOD> p1 = make_unique_small<SmallPOD>();
small_unique_ptr<LargePOD> p2 = make_unique_small<LargePOD>();
REQUIRE(p1.is_stack_allocated());
@@ -187,6 +226,11 @@ TEST_CASE("is_stack_allocated", "[small_unique_ptr]")
REQUIRE(p5.is_stack_allocated());
REQUIRE(!p6.is_stack_allocated());
+ small_unique_ptr<SmallPOD[]> p7 = make_unique_small<SmallPOD[]>(3);
+ small_unique_ptr<LargePOD[]> p8 = make_unique_small<LargePOD[]>(1);
+ REQUIRE(p7.is_stack_allocated());
+ REQUIRE(!p8.is_stack_allocated());
+
small_unique_ptr<SmallPOD> np(nullptr);
REQUIRE(!np.is_stack_allocated());
}
@@ -251,7 +295,7 @@ TEST_CASE("move_construct_plain", "[small_unique_ptr]")
STATIC_REQUIRE(64 == std::invoke([] { small_unique_ptr<Base> p = make_unique_small<LargeDerived>(); return p->padding(); }));
STATIC_REQUIRE( std::invoke([] { small_unique_ptr<Base> p = make_unique_small<SmallDerived>(); return true; }) );
- STATIC_REQUIRE( std::invoke([] { small_unique_ptr<const Base> p = make_unique_small<const SmallDerived>(); return true; } ));
+ STATIC_REQUIRE( std::invoke([] { small_unique_ptr<const Base> p = make_unique_small<const SmallDerived>(); return true; }) );
small_unique_ptr<Base> base1 = make_unique_small<SmallDerived>();
@@ -278,6 +322,19 @@ TEST_CASE("move_construct_plain", "[small_unique_ptr]")
SUCCEED();
}
+TEST_CASE("move_construct_array", "[small_unique_ptr]")
+{
+ STATIC_REQUIRE( std::invoke([] { small_unique_ptr<SmallPOD[]> p = make_unique_small<SmallPOD[]>(4); return true; }) );
+ STATIC_REQUIRE( std::invoke([] { small_unique_ptr<LargePOD[]> p = make_unique_small<LargePOD[]>(2); return true; }) );
+
+ small_unique_ptr<SmallPOD[]> cpod1 = make_unique_small<SmallPOD[]>(4);
+ small_unique_ptr<LargePOD[]> cpod2 = make_unique_small<LargePOD[]>(2);
+
+ small_unique_ptr<SmallPOD[]> cpod3 = make_unique_small<SmallPOD[]>(0);
+ small_unique_ptr<LargePOD[]> cpod4 = make_unique_small<LargePOD[]>(0);
+ SUCCEED();
+}
+
TEST_CASE("move_assignment_plain", "[small_unique_ptr]")
{
STATIC_REQUIRE(32 == std::invoke([] { small_unique_ptr<Base> p; p = make_unique_small<SmallDerived>(); return p->padding(); }));
@@ -319,6 +376,16 @@ TEST_CASE("move_assignment_plain", "[small_unique_ptr]")
SUCCEED();
}
+TEST_CASE("move_assignment_array", "[small_unique_ptr]")
+{
+ STATIC_REQUIRE( std::invoke([] { small_unique_ptr<SmallPOD[]> p; p = make_unique_small<SmallPOD[]>(4); return true; }) );
+ STATIC_REQUIRE( std::invoke([] { small_unique_ptr<LargePOD[]> p; p = make_unique_small<LargePOD[]>(4); return true; }) );
+
+ small_unique_ptr<SmallPOD[]> cpod1; cpod1 = make_unique_small<SmallPOD[]>(4);
+ small_unique_ptr<LargePOD[]> cpod2; cpod2 = make_unique_small<LargePOD[]>(4);
+ SUCCEED();
+}
+
TEST_CASE("swap_pod", "[small_unique_ptr]")
{
small_unique_ptr<SmallPOD> p1 = nullptr;
@@ -331,6 +398,23 @@ TEST_CASE("swap_pod", "[small_unique_ptr]")
REQUIRE(p1 != nullptr);
}
+TEST_CASE("swap_array", "[small_unique_ptr]")
+{
+ small_unique_ptr<SmallIntrusive[]> p1 = nullptr;
+ small_unique_ptr<SmallIntrusive[]> p2 = make_unique_small<SmallIntrusive[]>(3);
+
+ using std::swap;
+ swap(p1, p2);
+
+ REQUIRE(p2 == nullptr);
+ REQUIRE(p1[2].value() == 32);
+
+ swap(p1, p2);
+
+ REQUIRE(p1 == nullptr);
+ REQUIRE(p2[1].value() == 32);
+}
+
TEST_CASE("swap_large", "[small_unique_ptr]")
{
small_unique_ptr p1 = nullptr;
@@ -506,6 +590,16 @@ TEST_CASE("constexpr_swap", "[small_unique_ptr]")
return p2->padding();
}));
+
+ STATIC_REQUIRE(32 == std::invoke([]
+ {
+ small_unique_ptr<SmallDerived[]> p1 = make_unique_small<SmallDerived[]>(2);
+ small_unique_ptr<SmallDerived[]> p2 = make_unique_small<SmallDerived[]>(4);
+
+ swap(p1, p2);
+
+ return p1[3].padding();
+ }));
}
struct A { virtual ~A() = default; };
@@ -621,7 +715,7 @@ TEST_CASE("abstract_base", "[small_unique_ptr]")
REQUIRE(p == nullptr);
}
-TEST_CASE("simple_alignment", "[small_unique_ptr]")
+TEST_CASE("alignment_simple", "[small_unique_ptr]")
{
small_unique_ptr<SmallPOD> ps = make_unique_small<SmallPOD>();
small_unique_ptr<LargePOD> pl = make_unique_small<LargePOD>();
@@ -630,7 +724,16 @@ TEST_CASE("simple_alignment", "[small_unique_ptr]")
REQUIRE((std::bit_cast<std::uintptr_t>(std::addressof(*pl)) % alignof(LargePOD)) == 0);
}
-TEST_CASE("poly_alignment", "[small_unique_ptr]")
+TEST_CASE("alignment_array", "[small_unique_ptr]")
+{
+ small_unique_ptr<SmallPOD[]> ps = make_unique_small<SmallPOD[]>(4);
+ small_unique_ptr<LargePOD[]> pl = make_unique_small<LargePOD[]>(2);
+
+ REQUIRE((std::bit_cast<std::uintptr_t>(std::addressof(ps[0])) % alignof(SmallPOD)) == 0);
+ REQUIRE((std::bit_cast<std::uintptr_t>(std::addressof(pl[0])) % alignof(LargePOD)) == 0);
+}
+
+TEST_CASE("alignment_poly", "[small_unique_ptr]")
{
struct alignas(16) SmallAlign { virtual ~SmallAlign() = default; };
struct alignas(128) LargeAlign : SmallAlign {};