diff --git a/stl/inc/any b/stl/inc/any
index 9f776996ac0..2f33cb0545d 100644
--- a/stl/inc/any
+++ b/stl/inc/any
@@ -158,6 +158,20 @@ public:
     ~any() noexcept {
         reset();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        // The two least significant bits (LSBs) of _Storage._TypeData encode whether we're in _Small, _Big,
+        // or _Trivial mode; see _Rep(). The rest of the bits encode the type_info pointer that's used
+        // to remember what type of object we're storing; see _TypeInfo(). So, we'll take the tombstone value,
+        // clear out the two LSBs with the _Rep_mask, then set the bits for _Small mode.
+        // _Small mode will prevent the most misuse, as the pseudo-vtable will be used for _Destroy, _Copy, and _Move.
+        // _Big mode wouldn't use the pseudo-vtable for _Move, and _Trivial mode doesn't have a pseudo-vtable at all.
+        const uintptr_t _Tombstone_value{_MSVC_STL_UINTPTR_TOMBSTONE_VALUE};
+        // Set the pseudo-vtable pointer:
+        _Storage._SmallStorage._RTTI = reinterpret_cast<const _Any_small_RTTI*>(_Tombstone_value);
+        // Set the combined type_info pointer and representation mode bits:
+        _Storage._TypeData = (_Tombstone_value & ~_Rep_mask) | static_cast<uintptr_t>(_Any_representation::_Small);
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     // Assignment [any.assign]
diff --git a/stl/inc/deque b/stl/inc/deque
index b6fcee81c38..05793b8ac41 100644
--- a/stl/inc/deque
+++ b/stl/inc/deque
@@ -555,6 +555,18 @@ public:
     _Deque_val() noexcept : _Map(), _Mapsize(0), _Myoff(0), _Mysize(0) {}
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_Deque_val() noexcept {
+        if constexpr (is_pointer_v<_Mapptr>) {
+            const auto _Tombstone{reinterpret_cast<_Mapptr>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+            _Map = _Tombstone;
+            _Mapsize = 0;
+            _Myoff = 0;
+            _Mysize = 0;
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     _Map_difference_type _Getblock(size_type _Off) const noexcept {
         // NB: _Mapsize and _Block_size are guaranteed to be powers of 2
         return static_cast<_Map_difference_type>((_Off / _Block_size) & (_Mapsize - 1));
diff --git a/stl/inc/exception b/stl/inc/exception
index 3d07bb824c6..9cf770faff2 100644
--- a/stl/inc/exception
+++ b/stl/inc/exception
@@ -234,6 +234,12 @@ public:
     ~exception_ptr() noexcept {
         __ExceptionPtrDestroy(this);
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        const auto _Tombstone{reinterpret_cast<void*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+        _Data1 = _Tombstone;
+        _Data2 = _Tombstone;
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     exception_ptr(const exception_ptr& _Rhs) noexcept {
diff --git a/stl/inc/forward_list b/stl/inc/forward_list
index f0fb5398ef5..7d7dfb8db11 100644
--- a/stl/inc/forward_list
+++ b/stl/inc/forward_list
@@ -277,6 +277,15 @@ public:
     _Flist_val() noexcept : _Myhead() {} // initialize data
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_Flist_val() noexcept {
+        if constexpr (is_pointer_v<_Nodeptr>) {
+            const auto _Tombstone{reinterpret_cast<_Nodeptr>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+            _Myhead = _Tombstone;
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     _Nodeptr _Before_head() const noexcept { // return pointer to the "before begin" pseudo node
         return pointer_traits<_Nodeptr>::pointer_to(reinterpret_cast<_Node&>(const_cast<_Nodeptr&>(_Myhead)));
     }
diff --git a/stl/inc/functional b/stl/inc/functional
index 33d51be9a3b..e386dc35c1f 100644
--- a/stl/inc/functional
+++ b/stl/inc/functional
@@ -928,6 +928,11 @@ public:
     ~_Func_class() noexcept {
         _Tidy();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        const auto _Tombstone{reinterpret_cast<_Ptrt*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+        _Set(_Tombstone);
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
 protected:
@@ -1934,6 +1939,11 @@ public:
         // Do cleanup in this class destructor rather than base,
         // so that if object construction throws, the unnecessary cleanup isn't called.
         this->_Checked_destroy(this->_Data);
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        const auto _Tombstone{reinterpret_cast<const void*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+        this->_Data._Impl = _Tombstone;
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     move_only_function& operator=(nullptr_t) noexcept {
diff --git a/stl/inc/list b/stl/inc/list
index 1d67c2e4810..446649dcef4 100644
--- a/stl/inc/list
+++ b/stl/inc/list
@@ -352,6 +352,16 @@ public:
     _List_val() noexcept : _Myhead(), _Mysize(0) {} // initialize data
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_List_val() noexcept {
+        if constexpr (is_pointer_v<_Nodeptr>) {
+            const auto _Tombstone{reinterpret_cast<_Nodeptr>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+            _Myhead = _Tombstone;
+            _Mysize = 0;
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     void _Orphan_ptr2(_Nodeptr _Ptr) noexcept { // orphan iterators with specified node pointers
 #if _ITERATOR_DEBUG_LEVEL == 2
         _Lockit _Lock(_LOCK_DEBUG);
diff --git a/stl/inc/memory b/stl/inc/memory
index 3770d48706d..883a16076f7 100644
--- a/stl/inc/memory
+++ b/stl/inc/memory
@@ -1311,7 +1311,15 @@ protected:
     constexpr _Ptr_base() noexcept = default;
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_Ptr_base() noexcept {
+        const uintptr_t _Tombstone_value{_MSVC_STL_UINTPTR_TOMBSTONE_VALUE};
+        _Ptr = reinterpret_cast<element_type*>(_Tombstone_value);
+        _Rep = reinterpret_cast<_Ref_count_base*>(_Tombstone_value);
+    }
+#else // ^^^ _MSVC_STL_DESTRUCTOR_TOMBSTONES / !_MSVC_STL_DESTRUCTOR_TOMBSTONES vvv
     ~_Ptr_base() = default;
+#endif // ^^^ !_MSVC_STL_DESTRUCTOR_TOMBSTONES ^^^
 
     template <class _Ty2>
     void _Move_construct_from(_Ptr_base<_Ty2>&& _Right) noexcept {
@@ -3418,6 +3426,15 @@ public:
         if (_Mypair._Myval2) {
            _Mypair._Get_first()(_Mypair._Myval2);
         }
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        if constexpr (is_pointer_v<pointer>) {
+            if (!_STD _Is_constant_evaluated()) {
+                const auto _Tombstone{reinterpret_cast<pointer>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+                _Mypair._Myval2 = _Tombstone;
+            }
+        }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     _NODISCARD _CONSTEXPR23 _Dx& get_deleter() noexcept {
@@ -3555,6 +3572,15 @@ public:
         if (_Mypair._Myval2) {
            _Mypair._Get_first()(_Mypair._Myval2);
         }
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        if constexpr (is_pointer_v<pointer>) {
+            if (!_STD _Is_constant_evaluated()) {
+                const auto _Tombstone{reinterpret_cast<pointer>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+                _Mypair._Myval2 = _Tombstone;
+            }
+        }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     _NODISCARD _CONSTEXPR23 _Dx& get_deleter() noexcept {
diff --git a/stl/inc/optional b/stl/inc/optional
index 196374892eb..14cf3f65127 100644
--- a/stl/inc/optional
+++ b/stl/inc/optional
@@ -88,6 +88,9 @@ struct _Optional_destruct_base { // either contains a value of _Ty or is empty (
         : _Value(_STD invoke(_STD forward<_Fn>(_Func), _STD forward<_Ux>(_Arg))), _Has_value{true} {}
 #endif // _HAS_CXX23
 
+    // For the trivially destructible case, we can't add a destructor for _MSVC_STL_DESTRUCTOR_TOMBSTONES, due to
+    // N5001 [optional.dtor]/2: "Remarks: If is_trivially_destructible_v<T> is true, then this destructor is trivial."
+
     _CONSTEXPR20 void reset() noexcept {
         _Has_value = false;
     }
@@ -104,6 +107,13 @@ struct _Optional_destruct_base<_Ty, false> { // either contains a value of _Ty o
     _CONSTEXPR20 ~_Optional_destruct_base() noexcept {
         if (_Has_value) {
             _Value.~_Ty();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+            // For the non-trivially destructible case, we can set the optional to be empty.
+            // We don't attempt to scribble over the bytes of the object's storage because that could be expensive
+            // and we don't know whether the object has an invalid representation, much less what it could be.
+            _Has_value = false;
+#endif
         }
     }
diff --git a/stl/inc/regex b/stl/inc/regex
index d4988266506..27b63c11e2d 100644
--- a/stl/inc/regex
+++ b/stl/inc/regex
@@ -1923,6 +1923,11 @@ public:
     ~basic_regex() noexcept {
         _Tidy();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        const auto _Tombstone{reinterpret_cast<_Root_node*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+        _Rep = _Tombstone;
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     basic_regex& operator=(const basic_regex& _Right) {
diff --git a/stl/inc/valarray b/stl/inc/valarray
index bb30c016487..a6a0909d9a8 100644
--- a/stl/inc/valarray
+++ b/stl/inc/valarray
@@ -137,6 +137,14 @@ public:
     ~valarray() noexcept {
         _Tidy_deallocate();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        const auto _Tombstone{reinterpret_cast<_Ty*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+        _Myptr = _Tombstone;
+
+        // _Tidy_deallocate() sets _Mysize to 0, so we don't need to set it here.
+        _STL_INTERNAL_CHECK(_Mysize == 0);
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
     }
 
     valarray& operator=(const valarray& _Right) {
diff --git a/stl/inc/variant b/stl/inc/variant
index 8d472e9ca24..0679d94022d 100644
--- a/stl/inc/variant
+++ b/stl/inc/variant
@@ -756,6 +756,10 @@ public:
           _Which{static_cast<_Index_t>(_Idx)} {
         // initialize alternative _Idx from _Args...
     }
 
+    // For the trivially destructible case, we can't add a destructor for _MSVC_STL_DESTRUCTOR_TOMBSTONES, due to
+    // N5001 [variant.dtor]/2: "Remarks: If is_trivially_destructible_v<Ti> is true for all Ti,
+    // then this destructor is trivial."
+
     _NODISCARD constexpr bool valueless_by_exception() const noexcept { // does this variant NOT hold a value?
         return _Which < 0;
     }
@@ -839,6 +843,13 @@ struct _Variant_destroy_layer_ : _Variant_base<_Types...> { // destruction behav
     _CONSTEXPR20 ~_Variant_destroy_layer_() noexcept { // Destroy contained value, if any
         this->_Destroy();
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        // For the non-trivially destructible case, we can set the variant to be valueless.
+        // We don't attempt to scribble over the bytes of the object's storage because that could be expensive
+        // and we don't know whether the object has an invalid representation, much less what it could be.
+        this->_Set_index(variant_npos);
+#endif
     }
 
     _Variant_destroy_layer_() = default;
diff --git a/stl/inc/vector b/stl/inc/vector
index 1450120f061..a4dcee2cfbe 100644
--- a/stl/inc/vector
+++ b/stl/inc/vector
@@ -402,6 +402,19 @@ public:
     _CONSTEXPR20 _Vector_val(pointer _First, pointer _Last, pointer _End) noexcept
         : _Myfirst(_First), _Mylast(_Last), _Myend(_End) {}
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    _CONSTEXPR20 ~_Vector_val() noexcept {
+        if constexpr (is_pointer_v<pointer>) {
+            if (!_STD _Is_constant_evaluated()) {
+                const auto _Tombstone{reinterpret_cast<pointer>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+                _Myfirst = _Tombstone;
+                _Mylast = _Tombstone;
+                _Myend = _Tombstone;
+            }
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     _CONSTEXPR20 void _Swap_val(_Vector_val& _Right) noexcept {
         this->_Swap_proxy_and_iterators(_Right);
         swap(_Myfirst, _Right._Myfirst); // intentional ADL
@@ -2851,6 +2864,12 @@ public:
         auto&& _Alproxy = _GET_PROXY_ALLOCATOR(_Alvbase, this->_Getal());
         _Delete_plain_internal(_Alproxy, _STD exchange(this->_Myproxy, nullptr));
 #endif // _ITERATOR_DEBUG_LEVEL != 0
+
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        // Destroying _Myvec will store tombstones in its pointers, setting its apparent size to 0.
+        // We should also set our _Mysize to 0 for consistency, allowing precondition hardening to detect UB earlier.
+        _Mysize = 0;
+#endif
     }
 
     _CONSTEXPR20 _Alvbase& _Getal() noexcept {
diff --git a/stl/inc/xhash b/stl/inc/xhash
index 0df519a86ff..beb31a12d26 100644
--- a/stl/inc/xhash
+++ b/stl/inc/xhash
@@ -436,6 +436,16 @@ public:
 #endif // _ENABLE_STL_INTERNAL_CHECK
     }
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_Hash() noexcept {
+        // Destroying _List and _Vec will store tombstones in their pointers.
+        // We'll also set _Mask and _Maxidx back to the values used by _Hash's default constructor. Although this
+        // doesn't provide the same benefit as zeroing out size fields, it's consistent with the pattern elsewhere.
+        _Mask = _Min_buckets - 1;
+        _Maxidx = _Min_buckets;
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
 private:
     void _Swap_val(_Hash& _Right) noexcept { // swap contents with equal allocator _Hash _Right
         _List._Swap_val(_Right._List);
diff --git a/stl/inc/xpolymorphic_allocator.h b/stl/inc/xpolymorphic_allocator.h
index ea0825c7c9f..53fe25e7942 100644
--- a/stl/inc/xpolymorphic_allocator.h
+++ b/stl/inc/xpolymorphic_allocator.h
@@ -221,6 +221,13 @@ namespace pmr {
        polymorphic_allocator& operator=(const polymorphic_allocator&) = delete;
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+        ~polymorphic_allocator() noexcept {
+            const auto _Tombstone{reinterpret_cast<memory_resource*>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+            _Resource = _Tombstone;
+        }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
        _NODISCARD_RAW_PTR_ALLOC __declspec(allocator) _Ty* allocate(_CRT_GUARDOVERFLOW const size_t _Count) {
            // get space for _Count objects of type _Ty from _Resource
            void* const _Vp = _Resource->allocate(_Get_size_of_n<sizeof(_Ty)>(_Count), alignof(_Ty));
diff --git a/stl/inc/xstring b/stl/inc/xstring
index 52926040086..85f7f8a89fe 100644
--- a/stl/inc/xstring
+++ b/stl/inc/xstring
@@ -401,6 +401,25 @@ public:
     _CONSTEXPR20 _String_val() noexcept : _Bx() {}
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    _CONSTEXPR20 ~_String_val() noexcept {
+        if constexpr (is_pointer_v<pointer>) {
+            if (!_STD _Is_constant_evaluated()) {
+                const auto _Tombstone{reinterpret_cast<pointer>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+                _Bx._Ptr = _Tombstone;
+                _Mysize = 0;
+                _Myres = (_Small_string_capacity + 1) | _Alloc_mask; // first capacity when entering large mode
+
+                // The capacity indicates whether we're in small mode or large mode; see _Large_mode_engaged().
+                // The string would be usable in small mode, so we need large mode for the tombstone to be effective.
+                // `_Small_string_capacity + 1` would be sufficient to make _Large_mode_engaged() return true. However,
+                // basic_string uses a "roundup mask" when allocating; see _Calculate_growth(). So to avoid confusing
+                // the SSO logic, we use the first capacity that would normally be used when entering large mode.
+            }
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     // length of internal buffer, [1, 16] (NB: used by the debugger visualizer)
     static constexpr size_type _BUF_SIZE = 16 / sizeof(value_type) < 1 ? 1 : 16 / sizeof(value_type);
     // roundup mask for allocated buffers, [0, 15]
diff --git a/stl/inc/xtree b/stl/inc/xtree
index 9738b831130..d0237880371 100644
--- a/stl/inc/xtree
+++ b/stl/inc/xtree
@@ -449,6 +449,16 @@ public:
     _Tree_val() noexcept : _Myhead(), _Mysize(0) {}
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+    ~_Tree_val() noexcept {
+        if constexpr (is_pointer_v<_Nodeptr>) {
+            const auto _Tombstone{reinterpret_cast<_Nodeptr>(_MSVC_STL_UINTPTR_TOMBSTONE_VALUE)};
+            _Myhead = _Tombstone;
+            _Mysize = 0;
+        }
+    }
+#endif // _MSVC_STL_DESTRUCTOR_TOMBSTONES
+
     enum _Redbl { // colors for link to parent
         _Red,
         _Black
diff --git a/stl/inc/yvals.h b/stl/inc/yvals.h
index bae84f1e6b1..c8cfa973615 100644
--- a/stl/inc/yvals.h
+++ b/stl/inc/yvals.h
@@ -272,6 +272,14 @@ _EMIT_STL_ERROR(STL1008, "_STL_CALL_ABORT_INSTEAD_OF_INVALID_PARAMETER has been
 #define _STL_INTERNAL_CHECK(...) _Analysis_assume_(__VA_ARGS__)
 #endif // ^^^ !defined(_ENABLE_STL_INTERNAL_CHECK) ^^^
 
+#ifndef _MSVC_STL_DESTRUCTOR_TOMBSTONES
+#define _MSVC_STL_DESTRUCTOR_TOMBSTONES 0
+#endif
+
+#ifndef _MSVC_STL_UINTPTR_TOMBSTONE_VALUE
+#define _MSVC_STL_UINTPTR_TOMBSTONE_VALUE uintptr_t{19937}
+#endif
+
 #include <use_ansi.h>
 
 #ifdef _STATIC_CPPLIB
diff --git a/tests/std/test.lst b/tests/std/test.lst
index e188823bfcf..1c40a3294d8 100644
--- a/tests/std/test.lst
+++ b/tests/std/test.lst
@@ -254,6 +254,7 @@ tests\GH_004845_logical_operator_traits_with_non_bool_constant
 tests\GH_004929_internal_tag_constructors
 tests\GH_004930_char_traits_user_specialization
 tests\GH_005090_stl_hardening
+tests\GH_005315_destructor_tombstones
 tests\LWG2381_num_get_floating_point
 tests\LWG2597_complex_branch_cut
 tests\LWG3018_shared_ptr_function
diff --git a/tests/std/tests/GH_005315_destructor_tombstones/env.lst b/tests/std/tests/GH_005315_destructor_tombstones/env.lst
new file mode 100644
index 00000000000..2afe136720a
--- /dev/null
+++ b/tests/std/tests/GH_005315_destructor_tombstones/env.lst
@@ -0,0 +1,6 @@
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+RUNALL_INCLUDE ..\usual_matrix.lst
+RUNALL_CROSSLIST
+* PM_CL="/D_MSVC_STL_DESTRUCTOR_TOMBSTONES=1"
diff --git a/tests/std/tests/GH_005315_destructor_tombstones/test.cpp b/tests/std/tests/GH_005315_destructor_tombstones/test.cpp
new file mode 100644
index 00000000000..7e9a0bb290b
--- /dev/null
+++ b/tests/std/tests/GH_005315_destructor_tombstones/test.cpp
@@ -0,0 +1,188 @@
+// Copyright (c) Microsoft Corporation.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+// env.lst defines _MSVC_STL_DESTRUCTOR_TOMBSTONES to 1.
+
+#ifdef _M_CEE // work around a sporadic hang in /clr configurations
+int main() {}
+#else // ^^^ workaround / no workaround vvv
+
+#include <deque>
+#include <exception>
+#include <forward_list>
+#include <functional>
+#include <iterator>
+#include <list>
+#include <memory>
+#include <new>
+#include <regex>
+#include <set>
+#include <string>
+#include <unordered_set>
+#include <utility>
+#include <valarray>
+#include <vector>
+
+#if _HAS_CXX17
+#include <any>
+#include <memory_resource>
+#include <optional>
+#include <variant>
+#endif
+
+#include <test_death.hpp>
+
+using namespace std;
+
+// A good test is one that consistently gets away with UB when destructor tombstones are disabled,
+// but reliably terminates when destructor tombstones are enabled.
+// A great test is when not even ASan can detect the UB when destructor tombstones are disabled.
+// (Note that it may not always be possible to write a great test. For example, when
+// dynamically allocated sentinel nodes are involved, ASan is extremely good at detecting UB.)
+
+template <class T, class Func, class... Args>
+void call_on_destroyed_object(Func func, Args&&... args) {
+    alignas(T) unsigned char storage[sizeof(T)]{};
+    T& t = *::new (static_cast<void*>(storage)) T(forward<Args>(args)...);
+    t.~T();
+    func(t); // obviously undefined behavior
+}
+
+void test_vector() {
+    call_on_destroyed_object<vector<double>>([](auto& v) { v.push_back(17.29); });
+}
+void test_vector_bool() {
+    call_on_destroyed_object<vector<bool>>([](auto& vb) { vb.push_back(true); });
+}
+void test_deque() {
+    call_on_destroyed_object<deque<double>>([](auto& d) { d.shrink_to_fit(); });
+}
+void test_list() {
+    call_on_destroyed_object<list<double>>([](auto& l) { (void) (l.begin() == l.end()); });
+}
+void test_forward_list() {
+    call_on_destroyed_object<forward_list<double>>([](auto& fl) { (void) distance(fl.begin(), fl.end()); });
+}
+void test_set() {
+    call_on_destroyed_object<set<double>>([](auto& s) { (void) (s.begin() == s.end()); });
+}
+void test_unordered_set() {
+    call_on_destroyed_object<unordered_set<double>>([](auto& us) { (void) (us.begin() == us.end()); });
+}
+void test_string() {
+    call_on_destroyed_object<string>([](auto& str) { str[0] = '\0'; }); // 1-byte characters
+}
+void test_wstring() {
+    call_on_destroyed_object<wstring>([](auto& wstr) { wstr[0] = L'\0'; }); // 2-byte characters
+}
+void test_u32string() {
+    call_on_destroyed_object<u32string>([](auto& u32str) { u32str[0] = U'\0'; }); // 4-byte characters
+}
+void test_unique_ptr() {
+    call_on_destroyed_object<unique_ptr<double>>([](auto& up) { up.reset(); });
+}
+void test_unique_ptr_array() {
+    call_on_destroyed_object<unique_ptr<double[]>>([](auto& upa) { upa.reset(); });
+}
+void test_shared_ptr() {
+    call_on_destroyed_object<shared_ptr<double>>([](auto& sp) { sp.reset(); });
+}
+void test_weak_ptr() {
+    call_on_destroyed_object<weak_ptr<double>>([](auto& wp) { (void) wp.expired(); });
+}
+void test_exception_ptr() {
+    call_on_destroyed_object<exception_ptr>([](auto& ep) { ep = nullptr; });
+}
+void test_function() {
+    call_on_destroyed_object<function<void()>>([](auto& f) { f = nullptr; });
+}
+void test_regex() {
+    call_on_destroyed_object<regex>([](auto& r) { (void) r.mark_count(); });
+}
+void test_valarray() {
+    call_on_destroyed_object<valarray<double>>([](auto& va) { va.resize(10); });
+}
+
+#if _HAS_CXX17
+void test_any() {
+    call_on_destroyed_object<any>([](auto& a) { any other{move(a)}; });
+}
+void test_optional() {
+    call_on_destroyed_object<optional<string>>([](auto& o) { o.value() = "woof"; }, "meow");
+}
+void test_variant() {
+    call_on_destroyed_object<variant<string>>([](auto& var) { get<string>(var) = "woof"; }, "meow");
+}
+void test_polymorphic_allocator() {
+    call_on_destroyed_object<pmr::polymorphic_allocator<int>>([](auto& pa) { (void) pa.allocate(1); });
+}
+#endif // _HAS_CXX17
+
+#if _HAS_CXX23
+void test_move_only_function() {
+    call_on_destroyed_object<move_only_function<void()>>([](auto& mof) { mof = nullptr; });
+}
+#endif // _HAS_CXX23
+
+int main(int argc, char* argv[]) {
+    std_testing::death_test_executive exec;
+
+    exec.add_death_tests({
+        test_vector,
+        test_vector_bool,
+        test_deque,
+        test_list,
+        test_forward_list,
+        test_set,
+        test_unordered_set,
+        test_string,
+        test_wstring,
+        test_u32string,
+        test_unique_ptr,
+        test_unique_ptr_array,
+        test_shared_ptr,
+        test_weak_ptr,
+        test_exception_ptr,
+        test_function,
+        test_regex,
+        test_valarray,
+
+#if _HAS_CXX17
+        test_any,
+        test_optional,
+        test_variant,
+        test_polymorphic_allocator,
+#endif // _HAS_CXX17
+
+#if _HAS_CXX23
+        test_move_only_function,
+#endif // _HAS_CXX23
+    });
+
+    return exec.run(argc, argv);
+}
+
+#if _HAS_CXX20
+// Verify that destructor tombstones don't interfere with constexpr.
+constexpr bool test_constexpr() {
+    vector<double> v(10, 3.14);
+    vector<bool> vb(10, true);
+    string str{"cats"};
+    wstring wstr{L"meow"};
+    u32string u32str{U"purr"};
+    optional<string> o{"hiss"};
+
+#if _HAS_CXX23
+    unique_ptr<double> up{new double{3.14}};
+    unique_ptr<double[]> upa{new double[10]{}};
+#endif // _HAS_CXX23
+
+    return true;
+}
+
+static_assert(test_constexpr());
+#endif // _HAS_CXX20
+
+#endif // ^^^ no workaround ^^^
diff --git a/tests/std/tests/VSO_0000000_wcfb01_idempotent_container_destructors/test.cpp b/tests/std/tests/VSO_0000000_wcfb01_idempotent_container_destructors/test.cpp
index 7f320239cd1..ca6d179e981 100644
--- a/tests/std/tests/VSO_0000000_wcfb01_idempotent_container_destructors/test.cpp
+++ b/tests/std/tests/VSO_0000000_wcfb01_idempotent_container_destructors/test.cpp
@@ -7,23 +7,25 @@
 using namespace std;
 
+#if _MSVC_STL_DESTRUCTOR_TOMBSTONES
+#error This test is fundamentally incompatible with destructor tombstones.
+#endif
+
 int main() {
-    // this is nonstandard behavior, but historically our containers
-    // have allowed it so we want to preserve it in the current ABI
-    //
-    // TRANSITION, ABI
-    // in the next ABI breaking release we will change this behavior to terminate programs that do nonstandard things
-    // like this
+    // Idempotent destruction is an abomination that our `string` and `vector` have historically allowed.
+    // We're preserving it for the time being, except when destructor tombstones are enabled.
+
+    // TRANSITION, ABI: In the next ABI-breaking release, we'll stop supporting this nonstandard usage.
     {
         // small string
-        char buff[sizeof(string)];
+        alignas(string) char buff[sizeof(string)];
         string& s = *::new (buff) string;
         s.~string();
         s.~string();
     }
 
     {
         // big string
-        char buff[sizeof(string)];
+        alignas(string) char buff[sizeof(string)];
         string& s = *::new (buff) string("a really long string that is going to trigger separate allocation");
         s.~string();
         s.~string();
@@ -32,14 +34,14 @@ int main() {
     using vecT = vector<int>;
 
     {
         // empty vector
-        char buff[sizeof(vecT)];
+        alignas(vecT) char buff[sizeof(vecT)];
         vecT& v = *::new (buff) vecT;
         v.~vecT();
         v.~vecT();
     }
 
     {
         // data vector
-        char buff[sizeof(vecT)];
+        alignas(vecT) char buff[sizeof(vecT)];
         vecT& v = *::new (buff) vecT;
         v.push_back(42);
         v.~vecT();