path: root/absl/container/internal/compressed_tuple_test.cc
author    Vitaly Goldshteyn <goldvitaly@google.com>      2024-05-20 11:57:11 -0700
committer Copybara-Service <copybara-worker@google.com>  2024-05-20 11:57:56 -0700
commit    6ab5b0aad86dc08d257f6b567611c231c6b8ac31 (patch)
tree      3b627c722f43e7bac4acbaf89832c665a22bb5b2 /absl/container/internal/compressed_tuple_test.cc
parent    0128305738355d085e079bab281a7211a00a5b83 (diff)
Move `prepare_insert` out of line as the type-erased `PrepareInsertNonSoo`.

This significantly reduces the size of large binaries and creates a single hot function instead of many cold ones, which decreases cache misses during code execution. We also avoid size-related computations for tables with no deleted slots when a resize is necessary.

PiperOrigin-RevId: 635527119
Change-Id: I763b135f1f6089051e62e348a07b33536af265ab
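To illustrate the technique the commit message describes, here is a minimal, self-contained sketch of moving a templated hot path behind a single type-erased function. The names (`PolicyFunctions`, `PrepareInsertNonSoo`, `PrepareInsert`) and the placeholder arithmetic are illustrative assumptions, not the actual raw_hash_set internals:

// sketch.cc -- minimal sketch of type erasure used to deduplicate
// template instantiations; illustrative names, not Abseil internals.
#include <cstddef>
#include <cstdio>

// Per-instantiation facts, captured once as plain data (and, in a real
// implementation, function pointers) rather than as template code.
struct PolicyFunctions {
  std::size_t slot_size;
};

// The single shared ("out of line") hot function. Every table type calls
// this one definition instead of stamping out its own templated copy.
std::size_t PrepareInsertNonSoo(std::size_t capacity,
                                const PolicyFunctions& policy) {
  // Placeholder for the real logic: growth checks, probing, etc.
  return capacity * policy.slot_size;
}

// Thin templated shim: the only code duplicated per instantiation.
template <class Slot>
std::size_t PrepareInsert(std::size_t capacity) {
  static constexpr PolicyFunctions kPolicy = {sizeof(Slot)};
  return PrepareInsertNonSoo(capacity, kPolicy);
}

int main() {
  std::printf("%zu\n", PrepareInsert<int>(8));     // both instantiations share
  std::printf("%zu\n", PrepareInsert<double>(8));  // one hot function
}

Every instantiation of `PrepareInsert<Slot>` collapses into a call to the one shared definition, so the linker keeps a single hot function instead of one cold copy per table type.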
Diffstat (limited to 'absl/container/internal/compressed_tuple_test.cc')
-rw-r--r--  absl/container/internal/compressed_tuple_test.cc  28
1 file changed, 28 insertions, 0 deletions
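The diff below adds a regression test for CompressedTuple's empty-base optimization: an empty element contributes no storage, and its address coincides with the tuple's own. A standalone illustration of that property with a hand-rolled equivalent (the `Compressed`/`Uncompressed` types are hypothetical, not Abseil code):

// ebo.cc -- what the new test asserts, in isolation. On conforming
// compilers, an empty base of a standard-layout class occupies no
// storage and shares the derived object's address.
#include <cstdio>

struct Empty {};  // no data members

struct Compressed : Empty { int i; };    // empty base: costs zero bytes
struct Uncompressed { Empty e; int i; }; // empty member: costs padding

int main() {
  static_assert(sizeof(Compressed) == sizeof(int),
                "empty base adds no storage");
  static_assert(sizeof(Uncompressed) > sizeof(int),
                "empty member does");
  Compressed c{};
  // The empty "element" lives at the object's own address.
  const void* base = static_cast<const Empty*>(&c);
  std::printf("%s\n", base == static_cast<const void*>(&c)
                          ? "same address"
                          : "different address");
}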
diff --git a/absl/container/internal/compressed_tuple_test.cc b/absl/container/internal/compressed_tuple_test.cc
index 49818fb8..3cd9e18b 100644
--- a/absl/container/internal/compressed_tuple_test.cc
+++ b/absl/container/internal/compressed_tuple_test.cc
@@ -15,8 +15,11 @@
#include "absl/container/internal/compressed_tuple.h"
#include <memory>
+#include <set>
#include <string>
+#include <type_traits>
#include <utility>
+#include <vector>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
@@ -55,6 +58,7 @@ namespace {
using absl::test_internal::CopyableMovableInstance;
using absl::test_internal::InstanceTracker;
+using ::testing::Each;
TEST(CompressedTupleTest, Sizeof) {
EXPECT_EQ(sizeof(int), sizeof(CompressedTuple<int>));
@@ -71,6 +75,30 @@ TEST(CompressedTupleTest, Sizeof) {
sizeof(CompressedTuple<int, Empty<0>, NotEmpty<double>, Empty<1>>));
}
+TEST(CompressedTupleTest, PointerToEmpty) {
+ auto to_void_ptrs = [](const auto&... objs) {
+ return std::vector<const void*>{static_cast<const void*>(&objs)...};
+ };
+ {
+ using Tuple = CompressedTuple<int, Empty<0>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>()), Each(&t));
+ }
+ {
+ using Tuple = CompressedTuple<int, Empty<0>, Empty<1>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>(), t.get<2>()), Each(&t));
+ }
+ {
+ using Tuple = CompressedTuple<int, Empty<0>, Empty<1>, Empty<2>>;
+ EXPECT_EQ(sizeof(int), sizeof(Tuple));
+ Tuple t;
+ EXPECT_THAT(to_void_ptrs(t.get<1>(), t.get<2>(), t.get<3>()), Each(&t));
+ }
+}
+
TEST(CompressedTupleTest, OneMoveOnRValueConstructionTemp) {
InstanceTracker tracker;
CompressedTuple<CopyableMovableInstance> x1(CopyableMovableInstance(1));