diff --git a/common/map_test.cpp b/common/map_test.cpp index 7b7cf1c9ec2f0..fcee5e0f734c1 100644 --- a/common/map_test.cpp +++ b/common/map_test.cpp @@ -14,11 +14,26 @@ #include "common/raw_hashtable_test_helpers.h" +// Workaround for std::pair comparison deficiency in libc++ 16. +#if defined(_LIBCPP_VERSION) && _LIBCPP_VERSION < 170000 +namespace std { +template + requires(convertible_to && convertible_to) +inline auto operator==( + pair, std::reference_wrapper> lhs, + pair rhs) -> bool { + return lhs.first == static_cast(rhs.first) && + lhs.second == static_cast(rhs.second); +} +} // namespace std +#endif + namespace Carbon::Testing { namespace { using RawHashtable::FixedHashKeyContext; using RawHashtable::IndexKeyContext; +using RawHashtable::MoveOnlyTestData; using RawHashtable::TestData; using RawHashtable::TestKeyContext; using ::testing::Pair; @@ -29,9 +44,11 @@ auto ExpectMapElementsAre(MapT&& m, MatcherRangeT element_matchers) -> void { // Now collect the elements into a container. using KeyT = typename std::remove_reference::type::KeyT; using ValueT = typename std::remove_reference::type::ValueT; - std::vector> map_entries; + std::vector< + std::pair, std::reference_wrapper>> + map_entries; m.ForEach([&map_entries](KeyT& k, ValueT& v) { - map_entries.push_back({k, v}); + map_entries.push_back({std::ref(k), std::ref(v)}); }); // Use the GoogleMock unordered container matcher to validate and show errors @@ -68,6 +85,9 @@ auto MakeKeyValues(ValueCB value_cb, RangeT&& range, RangeTs&&... 
ranges) template class MapTest : public ::testing::Test {}; +template +class MoveOnlyMapTest : public ::testing::Test {}; + using Types = ::testing::Types< Map, Map, Map, Map, Map, @@ -76,6 +96,14 @@ using Types = ::testing::Types< Map>; TYPED_TEST_SUITE(MapTest, Types); +using MoveOnlyTypes = ::testing::Types< + Map, + Map, + Map, + Map, + Map>; +TYPED_TEST_SUITE(MoveOnlyMapTest, MoveOnlyTypes); + TYPED_TEST(MapTest, Basic) { TypeParam m; @@ -201,7 +229,7 @@ TYPED_TEST(MapTest, Move) { // large. for (int i : llvm::seq(1, 24)) { SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); - ASSERT_TRUE(m.Insert(i, i * 100).is_inserted()); + EXPECT_TRUE(m.Insert(i, i * 100).is_inserted()); } MapT other_m1 = std::move(m); @@ -244,10 +272,10 @@ TYPED_TEST(MapTest, Move) { ExpectMapElementsAre( other_m1, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 32))); - // Test copying of a moved-from table over a valid table and self-move-assign. - // The former is required to be valid, and the latter is in at least the case - // of self-move-assign-when-moved-from, but the result can be in any state so - // just do them and ensure we don't crash. + // Test copying of a moved-from table over a valid table and + // self-move-assign. The former is required to be valid, and the latter is + // in at least the case of self-move-assign-when-moved-from, but the result + // can be in any state so just do them and ensure we don't crash. MapT other_m2 = other_m1; // NOLINTNEXTLINE(bugprone-use-after-move): Testing required use-after-move. 
other_m2 = m; @@ -255,6 +283,64 @@ TYPED_TEST(MapTest, Move) { m = std::move(m); } +TYPED_TEST(MoveOnlyMapTest, MoveOnlyTypes) { + using MapT = TypeParam; + static_assert(!std::is_copy_assignable_v<MapT>); + static_assert(!std::is_copy_constructible_v<MapT>); + static_assert(std::is_move_assignable_v<MapT>); + static_assert(std::is_move_constructible_v<MapT>); + + auto make_map = [] { + MapT m; + // Make sure we exceed the small size for some of the map types, but not all + // of them, so we cover all the combinations of moving between small and + // large. + for (int i : llvm::seq(1, 24)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + EXPECT_TRUE(m.Insert(i, i * 100).is_inserted()); + } + return m; + }; + + MapT m = make_map(); + + MapT other_m1 = std::move(m); + ExpectMapElementsAre( + other_m1, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 24))); + + // Add some more elements. + for (int i : llvm::seq(24, 32)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + ASSERT_TRUE(other_m1.Insert(i, i * 100).is_inserted()); + } + ExpectMapElementsAre( + other_m1, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 32))); + + // Move back over a moved-from. + m = std::move(other_m1); + ExpectMapElementsAre( + m, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 32))); + + // Now add still more elements, crossing the small size limit for all tested + // map types. + for (int i : llvm::seq(32, 72)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + ASSERT_TRUE(m.Insert(i, i * 100).is_inserted()); + } + ExpectMapElementsAre( + m, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 72))); + + // Assignment replaces the contents. + m = make_map(); + ExpectMapElementsAre( + m, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 24))); + + // Self-swap (which does a self-move) works and is a no-op. 
+ std::swap(m, m); + ExpectMapElementsAre( + m, MakeKeyValues([](int k) { return k * 100; }, llvm::seq(1, 24))); +} + TYPED_TEST(MapTest, Conversions) { using MapT = TypeParam; using KeyT = MapT::KeyT; diff --git a/common/raw_hashtable.h b/common/raw_hashtable.h index ace1e3a507994..9fc498b76cd83 100644 --- a/common/raw_hashtable.h +++ b/common/raw_hashtable.h @@ -163,6 +163,10 @@ struct StorageEntry { IsTriviallyDestructible && std::is_trivially_move_constructible_v && std::is_trivially_move_constructible_v; + static constexpr bool IsCopyable = + IsTriviallyRelocatable || (std::is_copy_constructible_v && + std::is_copy_constructible_v); + auto key() const -> const KeyT& { // Ensure we don't need more alignment than available. Inside a method body // to apply to the complete type. @@ -233,6 +237,9 @@ struct StorageEntry { static constexpr bool IsTriviallyRelocatable = IsTriviallyDestructible && std::is_trivially_move_constructible_v; + static constexpr bool IsCopyable = + IsTriviallyRelocatable || std::is_copy_constructible_v; + auto key() const -> const KeyT& { // Ensure we don't need more alignment than available. 
static_assert( @@ -252,7 +259,9 @@ struct StorageEntry { key().~KeyT(); } - auto CopyFrom(const StorageEntry& entry) -> void { + auto CopyFrom(const StorageEntry& entry) -> void + requires(IsCopyable) + { if constexpr (IsTriviallyRelocatable) { memcpy(this, &entry, sizeof(StorageEntry)); } else { @@ -567,7 +576,8 @@ class BaseImpl { auto Construct(Storage* small_storage) -> void; auto Destroy() -> void; - auto CopySlotsFrom(const BaseImpl& arg) -> void; + auto CopySlotsFrom(const BaseImpl& arg) -> void + requires(EntryT::IsCopyable); auto MoveFrom(BaseImpl&& arg, Storage* small_storage) -> void; auto InsertIntoEmpty(HashCode hash) -> EntryT*; @@ -611,9 +621,11 @@ class TableImpl : public InputBaseT { using BaseT = InputBaseT; TableImpl() : BaseT(SmallSize, small_storage()) {} - TableImpl(const TableImpl& arg); + TableImpl(const TableImpl& arg) + requires(BaseT::EntryT::IsCopyable); TableImpl(TableImpl&& arg) noexcept; - auto operator=(const TableImpl& arg) -> TableImpl&; + auto operator=(const TableImpl& arg) -> TableImpl& + requires(BaseT::EntryT::IsCopyable); auto operator=(TableImpl&& arg) noexcept -> TableImpl&; ~TableImpl(); @@ -1202,7 +1214,9 @@ auto BaseImpl::Destroy() -> void { // storage. template auto BaseImpl::CopySlotsFrom( - const BaseImpl& arg) -> void { + const BaseImpl& arg) -> void + requires(EntryT::IsCopyable) +{ CARBON_DCHECK(alloc_size() == arg.alloc_size()); ssize_t local_size = alloc_size(); @@ -1545,6 +1559,7 @@ BaseImpl::GrowAndInsert( template TableImpl::TableImpl(const TableImpl& arg) + requires(BaseT::EntryT::IsCopyable) : BaseT(arg.alloc_size(), arg.growth_budget_, SmallSize) { // Check for completely broken objects. These invariants should be true even // in a moved-from state. @@ -1561,7 +1576,9 @@ TableImpl::TableImpl(const TableImpl& arg) template auto TableImpl::operator=(const TableImpl& arg) - -> TableImpl& { + -> TableImpl& + requires(BaseT::EntryT::IsCopyable) +{ // Check for completely broken objects. 
These invariants should be true even // in a moved-from state. CARBON_DCHECK(arg.alloc_size() == 0 || !arg.is_small() || diff --git a/common/raw_hashtable_test_helpers.h b/common/raw_hashtable_test_helpers.h index 83d8bf58e3abc..ee0ad5d479a91 100644 --- a/common/raw_hashtable_test_helpers.h +++ b/common/raw_hashtable_test_helpers.h @@ -43,6 +43,48 @@ struct TestData : Printable { } }; +static_assert(std::is_copy_constructible_v); + +// Non-trivial type for testing. +struct MoveOnlyTestData : Printable { + int value; + + // NOLINTNEXTLINE: google-explicit-constructor + MoveOnlyTestData(int v) : value(v) { CARBON_CHECK(value >= 0); } + ~MoveOnlyTestData() { + CARBON_CHECK(value >= 0); + value = -1; + } + MoveOnlyTestData(MoveOnlyTestData&& other) noexcept + : MoveOnlyTestData(other.value) { + other.value = 0; + } + auto operator=(MoveOnlyTestData&& other) noexcept -> MoveOnlyTestData& { + value = other.value; + other.value = 0; + return *this; + } + auto Print(llvm::raw_ostream& out) const -> void { out << value; } + + friend auto operator==(const MoveOnlyTestData& lhs, + const MoveOnlyTestData& rhs) -> bool { + return lhs.value == rhs.value; + } + + friend auto operator<=>(const MoveOnlyTestData& lhs, + const MoveOnlyTestData& rhs) -> std::strong_ordering { + return lhs.value <=> rhs.value; + } + + friend auto CarbonHashValue(const MoveOnlyTestData& data, uint64_t seed) + -> HashCode { + return Carbon::HashValue(data.value, seed); + } +}; + +static_assert(!std::is_copy_constructible_v); +static_assert(std::is_move_constructible_v); + // Test stateless key context that produces different hashes from normal. // Changing the hash values should result in test failures if the context ever // fails to be used. 
diff --git a/common/set_test.cpp b/common/set_test.cpp index 81ad810427cbb..27593025efcd8 100644 --- a/common/set_test.cpp +++ b/common/set_test.cpp @@ -17,6 +17,7 @@ namespace Carbon { namespace { using RawHashtable::IndexKeyContext; +using RawHashtable::MoveOnlyTestData; using RawHashtable::TestData; using ::testing::UnorderedElementsAreArray; @@ -24,8 +25,8 @@ template auto ExpectSetElementsAre(SetT&& s, MatcherRangeT element_matchers) -> void { // Collect the elements into a container. using KeyT = typename std::remove_reference::type::KeyT; - std::vector entries; - s.ForEach([&entries](KeyT& k) { entries.push_back(k); }); + std::vector> entries; + s.ForEach([&entries](KeyT& k) { entries.push_back(std::ref(k)); }); // Use the GoogleMock unordered container matcher to validate and show errors // on wrong elements. @@ -58,10 +59,18 @@ auto MakeElements(RangeT&& range, RangeTs&&... ranges) { template class SetTest : public ::testing::Test {}; +template +class MoveOnlySetTest : public ::testing::Test {}; + using Types = ::testing::Types, Set, Set, Set, Set>; TYPED_TEST_SUITE(SetTest, Types); +using MoveOnlyTypes = + ::testing::Types, Set, + Set>; +TYPED_TEST_SUITE(MoveOnlySetTest, MoveOnlyTypes); + TYPED_TEST(SetTest, Basic) { using SetT = TypeParam; SetT s; @@ -162,7 +171,7 @@ TYPED_TEST(SetTest, Move) { // large. for (int i : llvm::seq(1, 24)) { SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); - ASSERT_TRUE(s.Insert(i).is_inserted()); + EXPECT_TRUE(s.Insert(i).is_inserted()); } SetT other_s1 = std::move(s); @@ -198,10 +207,10 @@ TYPED_TEST(SetTest, Move) { std::swap(other_s1, other_s1); ExpectSetElementsAre(other_s1, MakeElements(llvm::seq(1, 32))); - // Test copying of a moved-from table over a valid table and self-move-assign. - // The former is required to be valid, and the latter is in at least the case - // of self-move-assign-when-moved-from, but the result can be in any state so - // just do them and ensure we don't crash. 
+ // Test copying of a moved-from table over a valid table and + // self-move-assign. The former is required to be valid, and the latter is + // in at least the case of self-move-assign-when-moved-from, but the result + // can be in any state so just do them and ensure we don't crash. SetT other_s2 = other_s1; // NOLINTNEXTLINE(bugprone-use-after-move): Testing required use-after-move. other_s2 = s; @@ -209,6 +218,58 @@ TYPED_TEST(SetTest, Move) { s = std::move(s); } +TYPED_TEST(MoveOnlySetTest, Move) { + using SetT = TypeParam; + static_assert(!std::is_copy_assignable_v<SetT>); + static_assert(!std::is_copy_constructible_v<SetT>); + static_assert(std::is_move_assignable_v<SetT>); + static_assert(std::is_move_constructible_v<SetT>); + + auto make_set = [] { + SetT s; + // Make sure we exceed the small size for some of the set types, but not all + // of them, so we cover all the combinations of moving between small and + // large. + for (int i : llvm::seq(1, 24)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + EXPECT_TRUE(s.Insert(i).is_inserted()); + } + return s; + }; + + SetT s = make_set(); + + SetT other_s1 = std::move(s); + ExpectSetElementsAre(other_s1, MakeElements(llvm::seq(1, 24))); + + // Add some more elements. + for (int i : llvm::seq(24, 32)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + ASSERT_TRUE(other_s1.Insert(i).is_inserted()); + } + ExpectSetElementsAre(other_s1, MakeElements(llvm::seq(1, 32))); + + // Move back over a moved-from. + s = std::move(other_s1); + ExpectSetElementsAre(s, MakeElements(llvm::seq(1, 32))); + + // Now add still more elements, crossing the small size limit for all tested + // set types. + for (int i : llvm::seq(32, 72)) { + SCOPED_TRACE(llvm::formatv("Key: {0}", i).str()); + ASSERT_TRUE(s.Insert(i).is_inserted()); + } + ExpectSetElementsAre(s, MakeElements(llvm::seq(1, 72))); + + // Assignment replaces the contents. 
+ s = make_set(); + ExpectSetElementsAre(s, MakeElements(llvm::seq(1, 24))); + + // Self-swap (which does a self-move) works and is a no-op. + std::swap(s, s); + ExpectSetElementsAre(s, MakeElements(llvm::seq(1, 24))); +} + TYPED_TEST(SetTest, Conversions) { using SetT = TypeParam; using KeyT = SetT::KeyT;