1 // Copyright 2019 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "absl/container/inlined_vector.h"
16
17 #include <algorithm>
18 #include <forward_list>
19 #include <iterator>
20 #include <list>
21 #include <memory>
22 #include <scoped_allocator>
23 #include <sstream>
24 #include <stdexcept>
25 #include <string>
26 #include <utility>
27 #include <vector>
28
29 #include "gmock/gmock.h"
30 #include "gtest/gtest.h"
31 #include "absl/base/attributes.h"
32 #include "absl/base/internal/exception_testing.h"
33 #include "absl/base/internal/raw_logging.h"
34 #include "absl/base/macros.h"
35 #include "absl/base/options.h"
36 #include "absl/container/internal/counting_allocator.h"
37 #include "absl/container/internal/test_instance_tracker.h"
38 #include "absl/hash/hash_testing.h"
39 #include "absl/memory/memory.h"
40 #include "absl/strings/str_cat.h"
41
42 namespace {
43
44 using absl::container_internal::CountingAllocator;
45 using absl::test_internal::CopyableMovableInstance;
46 using absl::test_internal::CopyableOnlyInstance;
47 using absl::test_internal::InstanceTracker;
48 using testing::AllOf;
49 using testing::Each;
50 using testing::ElementsAre;
51 using testing::ElementsAreArray;
52 using testing::Eq;
53 using testing::Gt;
54 using testing::Pointwise;
55 using testing::PrintToString;
56
57 using IntVec = absl::InlinedVector<int, 8>;
58
59 MATCHER_P(SizeIs, n, "") {
60 return testing::ExplainMatchResult(n, arg.size(), result_listener);
61 }
62
63 MATCHER_P(CapacityIs, n, "") {
64 return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
65 }
66
67 MATCHER_P(ValueIs, e, "") {
68 return testing::ExplainMatchResult(e, arg.value(), result_listener);
69 }
70
71 // TODO(bsamwel): Add support for movable-only types.
72
73 // Test fixture for typed tests on BaseCountedInstance derived classes, see
74 // test_instance_tracker.h.
75 template <typename T>
76 class InstanceTest : public ::testing::Test {};
77 TYPED_TEST_SUITE_P(InstanceTest);
78
79 // A simple reference counted class to make sure that the proper elements are
80 // destroyed in the erase(begin, end) test.
81 class RefCounted {
82 public:
83 RefCounted(int value, int* count) : value_(value), count_(count) { Ref(); }
84
85 RefCounted(const RefCounted& v) : value_(v.value_), count_(v.count_) {
86 Ref();
87 }
88
89 ~RefCounted() {
90 Unref();
91 count_ = nullptr;
92 }
93
94 friend void swap(RefCounted& a, RefCounted& b) {
95 using std::swap;
96 swap(a.value_, b.value_);
97 swap(a.count_, b.count_);
98 }
99
100 RefCounted& operator=(RefCounted v) {
101 using std::swap;
102 swap(*this, v);
103 return *this;
104 }
105
106 void Ref() const {
107 ABSL_RAW_CHECK(count_ != nullptr, "");
108 ++(*count_);
109 }
110
111 void Unref() const {
112 --(*count_);
113 ABSL_RAW_CHECK(*count_ >= 0, "");
114 }
115
116 int value_;
117 int* count_;
118 };
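
// A minimal sanity check added for illustration (not part of the original
// suite): RefCounted's external counter tracks the number of live copies,
// which is what the erase()/insert() tests below rely on.
TEST(RefCounted, CounterTracksLiveCopies) {
  int count = 0;
  {
    RefCounted a(1, &count);
    EXPECT_EQ(1, count);
    RefCounted b(a);
    EXPECT_EQ(2, count);
  }
  EXPECT_EQ(0, count);
}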
119
120 using RefCountedVec = absl::InlinedVector<RefCounted, 8>;
121
122 // A class with a vtable pointer
123 class Dynamic {
124 public:
125 virtual ~Dynamic() {}
126 };
127
128 using DynamicVec = absl::InlinedVector<Dynamic, 8>;
129
130 // Append 0..len-1 to *v
131 template <typename Container>
132 static void Fill(Container* v, size_t len, int offset = 0) {
133 for (size_t i = 0; i < len; i++) {
134 v->push_back(static_cast<int>(i) + offset);
135 }
136 }
137
138 static IntVec Fill(size_t len, int offset = 0) {
139 IntVec v;
140 Fill(&v, len, offset);
141 return v;
142 }
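
// Usage note added for clarity: Fill(&v, 3) appends 0, 1, 2 to v, and
// Fill(3, 100) returns an IntVec holding 100, 101, 102.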
143
144 TEST(IntVec, SimpleOps) {
145 for (size_t len = 0; len < 20; len++) {
146 IntVec v;
147 const IntVec& cv = v; // const alias
148
149 Fill(&v, len);
150 EXPECT_EQ(len, v.size());
151 EXPECT_LE(len, v.capacity());
152
153 for (size_t i = 0; i < len; i++) {
154 EXPECT_EQ(static_cast<int>(i), v[i]);
155 EXPECT_EQ(static_cast<int>(i), v.at(i));
156 }
157 EXPECT_EQ(v.begin(), v.data());
158 EXPECT_EQ(cv.begin(), cv.data());
159
160 size_t counter = 0;
161 for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) {
162 EXPECT_EQ(static_cast<int>(counter), *iter);
163 counter++;
164 }
165 EXPECT_EQ(counter, len);
166
167 counter = 0;
168 for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) {
169 EXPECT_EQ(static_cast<int>(counter), *iter);
170 counter++;
171 }
172 EXPECT_EQ(counter, len);
173
174 counter = 0;
175 for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) {
176 EXPECT_EQ(static_cast<int>(counter), *iter);
177 counter++;
178 }
179 EXPECT_EQ(counter, len);
180
181 if (len > 0) {
182 EXPECT_EQ(0, v.front());
183 EXPECT_EQ(static_cast<int>(len - 1), v.back());
184 v.pop_back();
185 EXPECT_EQ(len - 1, v.size());
186 for (size_t i = 0; i < v.size(); ++i) {
187 EXPECT_EQ(static_cast<int>(i), v[i]);
188 EXPECT_EQ(static_cast<int>(i), v.at(i));
189 }
190 }
191 }
192 }
193
194 TEST(IntVec, PopBackNoOverflow) {
195 IntVec v = {1};
196 v.pop_back();
197 EXPECT_EQ(v.size(), 0u);
198 }
199
200 TEST(IntVec, AtThrows) {
201 IntVec v = {1, 2, 3};
202 EXPECT_EQ(v.at(2), 3);
203 ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
204 "failed bounds check");
205 }
206
207 TEST(IntVec, ReverseIterator) {
208 for (size_t len = 0; len < 20; len++) {
209 IntVec v;
210 Fill(&v, len);
211
212 size_t counter = len;
213 for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend(); ++iter) {
214 counter--;
215 EXPECT_EQ(static_cast<int>(counter), *iter);
216 }
217 EXPECT_EQ(counter, 0u);
218
219 counter = len;
220 for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
221 ++iter) {
222 counter--;
223 EXPECT_EQ(static_cast<int>(counter), *iter);
224 }
225 EXPECT_EQ(counter, 0u);
226
227 counter = len;
228 for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
229 ++iter) {
230 counter--;
231 EXPECT_EQ(static_cast<int>(counter), *iter);
232 }
233 EXPECT_EQ(counter, 0u);
234 }
235 }
236
237 TEST(IntVec, Erase) {
238 for (size_t len = 1; len < 20; len++) {
239 for (size_t i = 0; i < len; ++i) {
240 IntVec v;
241 Fill(&v, len);
242 v.erase(v.begin() + i);
243 EXPECT_EQ(len - 1, v.size());
244 for (size_t j = 0; j < i; ++j) {
245 EXPECT_EQ(static_cast<int>(j), v[j]);
246 }
247 for (size_t j = i; j < len - 1; ++j) {
248 EXPECT_EQ(static_cast<int>(j + 1), v[j]);
249 }
250 }
251 }
252 }
253
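// Note (added for clarity): operator[] is bounds-checked only in debug builds
// or when the library is built with ABSL_OPTION_HARDENED, hence the
// conditional death tests below.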
254 TEST(IntVec, Hardened) {
255 IntVec v;
256 Fill(&v, 10);
257 EXPECT_EQ(v[9], 9);
258 #if !defined(NDEBUG) || ABSL_OPTION_HARDENED
259 EXPECT_DEATH_IF_SUPPORTED(v[10], "");
260 EXPECT_DEATH_IF_SUPPORTED(v[static_cast<size_t>(-1)], "");
261 EXPECT_DEATH_IF_SUPPORTED(v.resize(v.max_size() + 1), "");
262 #endif
263 }
264
265 // At the end of this test loop, the elements between [erase_begin, erase_end)
266 // should have reference counts == 0, and all other elements should have
267 // reference counts == 1.
268 TEST(RefCountedVec, EraseBeginEnd) {
269 for (size_t len = 1; len < 20; ++len) {
270 for (size_t erase_begin = 0; erase_begin < len; ++erase_begin) {
271 for (size_t erase_end = erase_begin; erase_end <= len; ++erase_end) {
272 std::vector<int> counts(len, 0);
273 RefCountedVec v;
274 for (size_t i = 0; i < len; ++i) {
275 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
276 }
277
278 size_t erase_len = erase_end - erase_begin;
279
280 v.erase(v.begin() + erase_begin, v.begin() + erase_end);
281
282 EXPECT_EQ(len - erase_len, v.size());
283
284 // Check the elements before the first element erased.
285 for (size_t i = 0; i < erase_begin; ++i) {
286 EXPECT_EQ(static_cast<int>(i), v[i].value_);
287 }
288
289 // Check the elements after the first element erased.
290 for (size_t i = erase_begin; i < v.size(); ++i) {
291 EXPECT_EQ(static_cast<int>(i + erase_len), v[i].value_);
292 }
293
294 // Check that the elements at the beginning are preserved.
295 for (size_t i = 0; i < erase_begin; ++i) {
296 EXPECT_EQ(1, counts[i]);
297 }
298
299 // Check that the erased elements are destroyed
300 for (size_t i = erase_begin; i < erase_end; ++i) {
301 EXPECT_EQ(0, counts[i]);
302 }
303
304 // Check that the elements at the end are preserved.
305 for (size_t i = erase_end; i < len; ++i) {
306 EXPECT_EQ(1, counts[i]);
307 }
308 }
309 }
310 }
311 }
312
313 struct NoDefaultCtor {
314 explicit NoDefaultCtor(int) {}
315 };
316 struct NoCopy {
317 NoCopy() {}
318 NoCopy(const NoCopy&) = delete;
319 };
320 struct NoAssign {
321 NoAssign() {}
322 NoAssign& operator=(const NoAssign&) = delete;
323 };
324 struct MoveOnly {
325 MoveOnly() {}
326 MoveOnly(MoveOnly&&) = default;
327 MoveOnly& operator=(MoveOnly&&) = default;
328 };
329 TEST(InlinedVectorTest, NoDefaultCtor) {
330 absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2));
331 (void)v;
332 }
333 TEST(InlinedVectorTest, NoCopy) {
334 absl::InlinedVector<NoCopy, 1> v(10);
335 (void)v;
336 }
337 TEST(InlinedVectorTest, NoAssign) {
338 absl::InlinedVector<NoAssign, 1> v(10);
339 (void)v;
340 }
341 TEST(InlinedVectorTest, MoveOnly) {
342 absl::InlinedVector<MoveOnly, 2> v;
343 v.push_back(MoveOnly{});
344 v.push_back(MoveOnly{});
345 v.push_back(MoveOnly{});
346 v.erase(v.begin());
347 v.push_back(MoveOnly{});
348 v.erase(v.begin(), v.begin() + 1);
349 v.insert(v.begin(), MoveOnly{});
350 v.emplace(v.begin());
351 v.emplace(v.begin(), MoveOnly{});
352 }
353 TEST(InlinedVectorTest, Noexcept) {
354 EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value);
355 EXPECT_TRUE((std::is_nothrow_move_constructible<
356 absl::InlinedVector<MoveOnly, 2>>::value));
357
358 struct MoveCanThrow {
359 MoveCanThrow(MoveCanThrow&&) {}
360 };
361 EXPECT_EQ(absl::default_allocator_is_nothrow::value,
362 (std::is_nothrow_move_constructible<
363 absl::InlinedVector<MoveCanThrow, 2>>::value));
364 }
365
366 TEST(InlinedVectorTest, EmplaceBack) {
367 absl::InlinedVector<std::pair<std::string, int>, 1> v;
368
369 auto& inlined_element = v.emplace_back("answer", 42);
370 EXPECT_EQ(&inlined_element, &v[0]);
371 EXPECT_EQ(inlined_element.first, "answer");
372 EXPECT_EQ(inlined_element.second, 42);
373
374 auto& allocated_element = v.emplace_back("taxicab", 1729);
375 EXPECT_EQ(&allocated_element, &v[1]);
376 EXPECT_EQ(allocated_element.first, "taxicab");
377 EXPECT_EQ(allocated_element.second, 1729);
378 }
379
380 TEST(InlinedVectorTest, ShrinkToFitGrowingVector) {
381 absl::InlinedVector<std::pair<std::string, int>, 1> v;
382
383 v.shrink_to_fit();
384 EXPECT_EQ(v.capacity(), 1u);
385
386 v.emplace_back("answer", 42);
387 v.shrink_to_fit();
388 EXPECT_EQ(v.capacity(), 1u);
389
390 v.emplace_back("taxicab", 1729);
391 EXPECT_GE(v.capacity(), 2u);
392 v.shrink_to_fit();
393 EXPECT_EQ(v.capacity(), 2u);
394
395 v.reserve(100);
396 EXPECT_GE(v.capacity(), 100u);
397 v.shrink_to_fit();
398 EXPECT_EQ(v.capacity(), 2u);
399 }
400
401 TEST(InlinedVectorTest, ShrinkToFitEdgeCases) {
402 {
403 absl::InlinedVector<std::pair<std::string, int>, 1> v;
404 v.emplace_back("answer", 42);
405 v.emplace_back("taxicab", 1729);
406 EXPECT_GE(v.capacity(), 2u);
407 v.pop_back();
408 v.shrink_to_fit();
409 EXPECT_EQ(v.capacity(), 1u);
410 EXPECT_EQ(v[0].first, "answer");
411 EXPECT_EQ(v[0].second, 42);
412 }
413
414 {
415 absl::InlinedVector<std::string, 2> v(100);
416 v.resize(0);
417 v.shrink_to_fit();
418 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
419 }
420
421 {
422 absl::InlinedVector<std::string, 2> v(100);
423 v.resize(1);
424 v.shrink_to_fit();
425 EXPECT_EQ(v.capacity(), 2u); // inlined capacity
426 }
427
428 {
429 absl::InlinedVector<std::string, 2> v(100);
430 v.resize(2);
431 v.shrink_to_fit();
432 EXPECT_EQ(v.capacity(), 2u);
433 }
434
435 {
436 absl::InlinedVector<std::string, 2> v(100);
437 v.resize(3);
438 v.shrink_to_fit();
439 EXPECT_EQ(v.capacity(), 3u);
440 }
441 }
442
443 TEST(IntVec, Insert) {
444 for (size_t len = 0; len < 20; len++) {
445 for (ptrdiff_t pos = 0; pos <= static_cast<ptrdiff_t>(len); pos++) {
446 {
447 // Single element
448 std::vector<int> std_v;
449 Fill(&std_v, len);
450 IntVec v;
451 Fill(&v, len);
452
453 std_v.insert(std_v.begin() + pos, 9999);
454 IntVec::iterator it = v.insert(v.cbegin() + pos, 9999);
455 EXPECT_THAT(v, ElementsAreArray(std_v));
456 EXPECT_EQ(it, v.cbegin() + pos);
457 }
458 {
459 // n elements
460 std::vector<int> std_v;
461 Fill(&std_v, len);
462 IntVec v;
463 Fill(&v, len);
464
465 IntVec::size_type n = 5;
466 std_v.insert(std_v.begin() + pos, n, 9999);
467 IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999);
468 EXPECT_THAT(v, ElementsAreArray(std_v));
469 EXPECT_EQ(it, v.cbegin() + pos);
470 }
471 {
472 // Iterator range (random access iterator)
473 std::vector<int> std_v;
474 Fill(&std_v, len);
475 IntVec v;
476 Fill(&v, len);
477
478 const std::vector<int> input = {9999, 8888, 7777};
479 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
480 IntVec::iterator it =
481 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
482 EXPECT_THAT(v, ElementsAreArray(std_v));
483 EXPECT_EQ(it, v.cbegin() + pos);
484 }
485 {
486 // Iterator range (forward iterator)
487 std::vector<int> std_v;
488 Fill(&std_v, len);
489 IntVec v;
490 Fill(&v, len);
491
492 const std::forward_list<int> input = {9999, 8888, 7777};
493 std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend());
494 IntVec::iterator it =
495 v.insert(v.cbegin() + pos, input.cbegin(), input.cend());
496 EXPECT_THAT(v, ElementsAreArray(std_v));
497 EXPECT_EQ(it, v.cbegin() + pos);
498 }
499 {
500 // Iterator range (input iterator)
501 std::vector<int> std_v;
502 Fill(&std_v, len);
503 IntVec v;
504 Fill(&v, len);
505
506 std_v.insert(std_v.begin() + pos, {9999, 8888, 7777});
507 std::istringstream input("9999 8888 7777");
508 IntVec::iterator it =
509 v.insert(v.cbegin() + pos, std::istream_iterator<int>(input),
510 std::istream_iterator<int>());
511 EXPECT_THAT(v, ElementsAreArray(std_v));
512 EXPECT_EQ(it, v.cbegin() + pos);
513 }
514 {
515 // Initializer list
516 std::vector<int> std_v;
517 Fill(&std_v, len);
518 IntVec v;
519 Fill(&v, len);
520
521 std_v.insert(std_v.begin() + pos, {9999, 8888});
522 IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888});
523 EXPECT_THAT(v, ElementsAreArray(std_v));
524 EXPECT_EQ(it, v.cbegin() + pos);
525 }
526 }
527 }
528 }
529
530 TEST(RefCountedVec, InsertConstructorDestructor) {
531 // Make sure the proper construction/destruction happen during insert
532 // operations.
533 for (size_t len = 0; len < 20; len++) {
534 SCOPED_TRACE(len);
535 for (size_t pos = 0; pos <= len; pos++) {
536 SCOPED_TRACE(pos);
537 std::vector<int> counts(len, 0);
538 int inserted_count = 0;
539 RefCountedVec v;
540 for (size_t i = 0; i < len; ++i) {
541 SCOPED_TRACE(i);
542 v.push_back(RefCounted(static_cast<int>(i), &counts[i]));
543 }
544
545 EXPECT_THAT(counts, Each(Eq(1)));
546
547 RefCounted insert_element(9999, &inserted_count);
548 EXPECT_EQ(1, inserted_count);
549 v.insert(v.begin() + pos, insert_element);
550 EXPECT_EQ(2, inserted_count);
551 // Check that the elements at the end are preserved.
552 EXPECT_THAT(counts, Each(Eq(1)));
553 EXPECT_EQ(2, inserted_count);
554 }
555 }
556 }
557
558 TEST(IntVec, Resize) {
559 for (size_t len = 0; len < 20; len++) {
560 IntVec v;
561 Fill(&v, len);
562
563 // Try resizing up and down by k elements
564 static const int kResizeElem = 1000000;
565 for (size_t k = 0; k < 10; k++) {
566 // Enlarging resize
567 v.resize(len + k, kResizeElem);
568 EXPECT_EQ(len + k, v.size());
569 EXPECT_LE(len + k, v.capacity());
570 for (size_t i = 0; i < len + k; i++) {
571 if (i < len) {
572 EXPECT_EQ(static_cast<int>(i), v[i]);
573 } else {
574 EXPECT_EQ(kResizeElem, v[i]);
575 }
576 }
577
578 // Shrinking resize
579 v.resize(len, kResizeElem);
580 EXPECT_EQ(len, v.size());
581 EXPECT_LE(len, v.capacity());
582 for (size_t i = 0; i < len; i++) {
583 EXPECT_EQ(static_cast<int>(i), v[i]);
584 }
585 }
586 }
587 }
588
589 TEST(IntVec, InitWithLength) {
590 for (size_t len = 0; len < 20; len++) {
591 IntVec v(len, 7);
592 EXPECT_EQ(len, v.size());
593 EXPECT_LE(len, v.capacity());
594 for (size_t i = 0; i < len; i++) {
595 EXPECT_EQ(7, v[i]);
596 }
597 }
598 }
599
600 TEST(IntVec, CopyConstructorAndAssignment) {
601 for (size_t len = 0; len < 20; len++) {
602 IntVec v;
603 Fill(&v, len);
604 EXPECT_EQ(len, v.size());
605 EXPECT_LE(len, v.capacity());
606
607 IntVec v2(v);
608 EXPECT_TRUE(v == v2) << PrintToString(v) << PrintToString(v2);
609
610 for (size_t start_len = 0; start_len < 20; start_len++) {
611 IntVec v3;
612 Fill(&v3, start_len, 99); // Add dummy elements that should go away
613 v3 = v;
614 EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3);
615 }
616 }
617 }
618
619 TEST(IntVec, AliasingCopyAssignment) {
620 for (size_t len = 0; len < 20; ++len) {
621 IntVec original;
622 Fill(&original, len);
623 IntVec dup = original;
624 dup = *&dup;
625 EXPECT_EQ(dup, original);
626 }
627 }
628
629 TEST(IntVec, MoveConstructorAndAssignment) {
630 for (size_t len = 0; len < 20; len++) {
631 IntVec v_in;
632 const size_t inlined_capacity = v_in.capacity();
633 Fill(&v_in, len);
634 EXPECT_EQ(len, v_in.size());
635 EXPECT_LE(len, v_in.capacity());
636
637 {
638 IntVec v_temp(v_in);
639 auto* old_data = v_temp.data();
640 IntVec v_out(std::move(v_temp));
641 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
642 if (v_in.size() > inlined_capacity) {
643 // Allocation is moved as a whole, data stays in place.
644 EXPECT_TRUE(v_out.data() == old_data);
645 } else {
646 EXPECT_FALSE(v_out.data() == old_data);
647 }
648 }
649 for (size_t start_len = 0; start_len < 20; start_len++) {
650 IntVec v_out;
651 Fill(&v_out, start_len, 99); // Add dummy elements that should go away
652 IntVec v_temp(v_in);
653 auto* old_data = v_temp.data();
654 v_out = std::move(v_temp);
655 EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out);
656 if (v_in.size() > inlined_capacity) {
657 // Allocation is moved as a whole, data stays in place.
658 EXPECT_TRUE(v_out.data() == old_data);
659 } else {
660 EXPECT_FALSE(v_out.data() == old_data);
661 }
662 }
663 }
664 }
665
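// Holds a heap-allocated int, so it is not trivially destructible. Used by
// the aliasing tests below to exercise emplace()/insert() with arguments that
// reference elements of the vector itself.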
666 class NotTriviallyDestructible {
667 public:
668 NotTriviallyDestructible() : p_(new int(1)) {}
669 explicit NotTriviallyDestructible(int i) : p_(new int(i)) {}
670
671 NotTriviallyDestructible(const NotTriviallyDestructible& other)
672 : p_(new int(*other.p_)) {}
673
674 NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) {
675 p_ = absl::make_unique<int>(*other.p_);
676 return *this;
677 }
678
679 bool operator==(const NotTriviallyDestructible& other) const {
680 return *p_ == *other.p_;
681 }
682
683 private:
684 std::unique_ptr<int> p_;
685 };
686
687 TEST(AliasingTest, Emplace) {
688 for (size_t i = 2; i < 20; ++i) {
689 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
690 for (size_t j = 0; j < i; ++j) {
691 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
692 }
693 vec.emplace(vec.begin(), vec[0]);
694 EXPECT_EQ(vec[0], vec[1]);
695 vec.emplace(vec.begin() + i / 2, vec[i / 2]);
696 EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]);
697 vec.emplace(vec.end() - 1, vec.back());
698 EXPECT_EQ(vec[vec.size() - 2], vec.back());
699 }
700 }
701
702 TEST(AliasingTest, InsertWithCount) {
703 for (size_t i = 1; i < 20; ++i) {
704 absl::InlinedVector<NotTriviallyDestructible, 10> vec;
705 for (size_t j = 0; j < i; ++j) {
706 vec.push_back(NotTriviallyDestructible(static_cast<int>(j)));
707 }
708 for (size_t n = 0; n < 5; ++n) {
709 // We use back where we can because it's guaranteed to become invalidated
710 vec.insert(vec.begin(), n, vec.back());
711 auto b = vec.begin();
712 EXPECT_TRUE(
713 std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) {
714 return x == vec.back();
715 }));
716
717 auto m_idx = vec.size() / 2;
718 vec.insert(vec.begin() + m_idx, n, vec.back());
719 auto m = vec.begin() + m_idx;
720 EXPECT_TRUE(
721 std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) {
722 return x == vec.back();
723 }));
724
725 // We want distinct values so the equality test is meaningful,
726 // vec[vec.size() - 1] is also almost always invalidated.
727 auto old_e = vec.size() - 1;
728 auto val = vec[old_e];
729 vec.insert(vec.end(), n, vec[old_e]);
730 auto e = vec.begin() + old_e;
731 EXPECT_TRUE(std::all_of(
732 e, e + n,
733 [&val](const NotTriviallyDestructible& x) { return x == val; }));
734 }
735 }
736 }
737
738 TEST(OverheadTest, Storage) {
739 // Check for size overhead.
740 // In particular, ensure that std::allocator doesn't cost anything to store.
741 // The union should be absorbing some of the allocation bookkeeping overhead
742 // in the larger vectors, leaving only the size_ field as overhead.
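//
// As a concrete sketch (assuming pointer-sized T, e.g. 8 bytes on a 64-bit
// build): absl::InlinedVector<T, 8> is expected to occupy sizeof(T[8]) plus
// one extra word for the size/tag field, while the N == 1 case pays two extra
// words because its single inlined slot cannot absorb the allocation
// bookkeeping.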
743
744 struct T { void* val; };
745 size_t expected_overhead = sizeof(T);
746
747 EXPECT_EQ((2 * expected_overhead),
748 sizeof(absl::InlinedVector<T, 1>) - sizeof(T[1]));
749 EXPECT_EQ(expected_overhead,
750 sizeof(absl::InlinedVector<T, 2>) - sizeof(T[2]));
751 EXPECT_EQ(expected_overhead,
752 sizeof(absl::InlinedVector<T, 3>) - sizeof(T[3]));
753 EXPECT_EQ(expected_overhead,
754 sizeof(absl::InlinedVector<T, 4>) - sizeof(T[4]));
755 EXPECT_EQ(expected_overhead,
756 sizeof(absl::InlinedVector<T, 5>) - sizeof(T[5]));
757 EXPECT_EQ(expected_overhead,
758 sizeof(absl::InlinedVector<T, 6>) - sizeof(T[6]));
759 EXPECT_EQ(expected_overhead,
760 sizeof(absl::InlinedVector<T, 7>) - sizeof(T[7]));
761 EXPECT_EQ(expected_overhead,
762 sizeof(absl::InlinedVector<T, 8>) - sizeof(T[8]));
763 }
764
765 TEST(IntVec, Clear) {
766 for (size_t len = 0; len < 20; len++) {
767 SCOPED_TRACE(len);
768 IntVec v;
769 Fill(&v, len);
770 v.clear();
771 EXPECT_EQ(0u, v.size());
772 EXPECT_EQ(v.begin(), v.end());
773 }
774 }
775
776 TEST(IntVec, Reserve) {
777 for (size_t len = 0; len < 20; len++) {
778 IntVec v;
779 Fill(&v, len);
780
781 for (size_t newlen = 0; newlen < 100; newlen++) {
782 const int* start_rep = v.data();
783 v.reserve(newlen);
784 const int* final_rep = v.data();
785 if (newlen <= len) {
786 EXPECT_EQ(start_rep, final_rep);
787 }
788 EXPECT_LE(newlen, v.capacity());
789
790 // Filling up to newlen should not change rep
791 while (v.size() < newlen) {
792 v.push_back(0);
793 }
794 EXPECT_EQ(final_rep, v.data());
795 }
796 }
797 }
798
799 TEST(StringVec, SelfRefPushBack) {
800 std::vector<std::string> std_v;
801 absl::InlinedVector<std::string, 4> v;
802 const std::string s = "A quite long string to ensure heap.";
803 std_v.push_back(s);
804 v.push_back(s);
805 for (int i = 0; i < 20; ++i) {
806 EXPECT_THAT(v, ElementsAreArray(std_v));
807
808 v.push_back(v.back());
809 std_v.push_back(std_v.back());
810 }
811 EXPECT_THAT(v, ElementsAreArray(std_v));
812 }
813
814 TEST(StringVec, SelfRefPushBackWithMove) {
815 std::vector<std::string> std_v;
816 absl::InlinedVector<std::string, 4> v;
817 const std::string s = "A quite long string to ensure heap.";
818 std_v.push_back(s);
819 v.push_back(s);
820 for (int i = 0; i < 20; ++i) {
821 EXPECT_EQ(v.back(), std_v.back());
822
823 v.push_back(std::move(v.back()));
824 std_v.push_back(std::move(std_v.back()));
825 }
826 EXPECT_EQ(v.back(), std_v.back());
827 }
828
829 TEST(StringVec, SelfMove) {
830 const std::string s = "A quite long string to ensure heap.";
831 for (int len = 0; len < 20; len++) {
832 SCOPED_TRACE(len);
833 absl::InlinedVector<std::string, 8> v;
834 for (int i = 0; i < len; ++i) {
835 SCOPED_TRACE(i);
836 v.push_back(s);
837 }
838 // Indirection necessary to avoid compiler warning.
839 v = std::move(*(&v));
840 // Ensure that the inlined vector is still in a valid state by copying it.
841 // We don't expect specific contents since a self-move results in an
842 // unspecified valid state.
843 std::vector<std::string> copy(v.begin(), v.end());
844 }
845 }
846
847 TEST(IntVec, Swap) {
848 for (size_t l1 = 0; l1 < 20; l1++) {
849 SCOPED_TRACE(l1);
850 for (size_t l2 = 0; l2 < 20; l2++) {
851 SCOPED_TRACE(l2);
852 IntVec a = Fill(l1, 0);
853 IntVec b = Fill(l2, 100);
854 {
855 using std::swap;
856 swap(a, b);
857 }
858 EXPECT_EQ(l1, b.size());
859 EXPECT_EQ(l2, a.size());
860 for (size_t i = 0; i < l1; i++) {
861 SCOPED_TRACE(i);
862 EXPECT_EQ(static_cast<int>(i), b[i]);
863 }
864 for (size_t i = 0; i < l2; i++) {
865 SCOPED_TRACE(i);
866 EXPECT_EQ(100 + static_cast<int>(i), a[i]);
867 }
868 }
869 }
870 }
871
872 TYPED_TEST_P(InstanceTest, Swap) {
873 using Instance = TypeParam;
874 using InstanceVec = absl::InlinedVector<Instance, 8>;
875 for (size_t l1 = 0; l1 < 20; l1++) {
876 SCOPED_TRACE(l1);
877 for (size_t l2 = 0; l2 < 20; l2++) {
878 SCOPED_TRACE(l2);
879 InstanceTracker tracker;
880 InstanceVec a, b;
881 const size_t inlined_capacity = a.capacity();
882 auto min_len = std::min(l1, l2);
883 auto max_len = std::max(l1, l2);
884 for (size_t i = 0; i < l1; i++)
885 a.push_back(Instance(static_cast<int>(i)));
886 for (size_t i = 0; i < l2; i++)
887 b.push_back(Instance(100 + static_cast<int>(i)));
888 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
889 tracker.ResetCopiesMovesSwaps();
890 {
891 using std::swap;
892 swap(a, b);
893 }
894 EXPECT_EQ(tracker.instances(), static_cast<int>(l1 + l2));
895 if (a.size() > inlined_capacity && b.size() > inlined_capacity) {
896 EXPECT_EQ(tracker.swaps(), 0); // Allocations are swapped.
897 EXPECT_EQ(tracker.moves(), 0);
898 } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) {
899 EXPECT_EQ(tracker.swaps(), static_cast<int>(min_len));
900 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
901 static_cast<int>(max_len - min_len));
902 } else {
903 // One is allocated and the other isn't. The allocation is transferred
904 // without copying elements, and the inlined instances are copied/moved.
905 EXPECT_EQ(tracker.swaps(), 0);
906 EXPECT_EQ((tracker.moves() ? tracker.moves() : tracker.copies()),
907 static_cast<int>(min_len));
908 }
909
910 EXPECT_EQ(l1, b.size());
911 EXPECT_EQ(l2, a.size());
912 for (size_t i = 0; i < l1; i++) {
913 EXPECT_EQ(static_cast<int>(i), b[i].value());
914 }
915 for (size_t i = 0; i < l2; i++) {
916 EXPECT_EQ(100 + static_cast<int>(i), a[i].value());
917 }
918 }
919 }
920 }
921
922 TEST(IntVec, EqualAndNotEqual) {
923 IntVec a, b;
924 EXPECT_TRUE(a == b);
925 EXPECT_FALSE(a != b);
926
927 a.push_back(3);
928 EXPECT_FALSE(a == b);
929 EXPECT_TRUE(a != b);
930
931 b.push_back(3);
932 EXPECT_TRUE(a == b);
933 EXPECT_FALSE(a != b);
934
935 b.push_back(7);
936 EXPECT_FALSE(a == b);
937 EXPECT_TRUE(a != b);
938
939 a.push_back(6);
940 EXPECT_FALSE(a == b);
941 EXPECT_TRUE(a != b);
942
943 a.clear();
944 b.clear();
945 for (size_t i = 0; i < 100; i++) {
946 a.push_back(static_cast<int>(i));
947 b.push_back(static_cast<int>(i));
948 EXPECT_TRUE(a == b);
949 EXPECT_FALSE(a != b);
950
951 b[i] = b[i] + 1;
952 EXPECT_FALSE(a == b);
953 EXPECT_TRUE(a != b);
954
955 b[i] = b[i] - 1; // Back to before
956 EXPECT_TRUE(a == b);
957 EXPECT_FALSE(a != b);
958 }
959 }
960
961 TEST(IntVec, RelationalOps) {
962 IntVec a, b;
963 EXPECT_FALSE(a < b);
964 EXPECT_FALSE(b < a);
965 EXPECT_FALSE(a > b);
966 EXPECT_FALSE(b > a);
967 EXPECT_TRUE(a <= b);
968 EXPECT_TRUE(b <= a);
969 EXPECT_TRUE(a >= b);
970 EXPECT_TRUE(b >= a);
971 b.push_back(3);
972 EXPECT_TRUE(a < b);
973 EXPECT_FALSE(b < a);
974 EXPECT_FALSE(a > b);
975 EXPECT_TRUE(b > a);
976 EXPECT_TRUE(a <= b);
977 EXPECT_FALSE(b <= a);
978 EXPECT_FALSE(a >= b);
979 EXPECT_TRUE(b >= a);
980 }
981
982 TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) {
983 using Instance = TypeParam;
984 using InstanceVec = absl::InlinedVector<Instance, 8>;
985 InstanceTracker tracker;
986 for (size_t len = 0; len < 20; len++) {
987 SCOPED_TRACE(len);
988 tracker.ResetCopiesMovesSwaps();
989
990 InstanceVec v;
991 const size_t inlined_capacity = v.capacity();
992 for (size_t i = 0; i < len; i++) {
993 v.push_back(Instance(static_cast<int>(i)));
994 }
995 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
996 EXPECT_GE(tracker.copies() + tracker.moves(),
997 static_cast<int>(len)); // More due to reallocation.
998 tracker.ResetCopiesMovesSwaps();
999
1000 // Enlarging resize() must construct some objects
1001 tracker.ResetCopiesMovesSwaps();
1002 v.resize(len + 10, Instance(100));
1003 EXPECT_EQ(tracker.instances(), static_cast<int>(len) + 10);
1004 if (len <= inlined_capacity && len + 10 > inlined_capacity) {
1005 EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + static_cast<int>(len));
1006 } else {
1007 // Only specify a minimum number of copies + moves. We don't want to
1008 // depend on the reallocation policy here.
1009 EXPECT_GE(tracker.copies() + tracker.moves(),
1010 10); // More due to reallocation.
1011 }
1012
1013 // Shrinking resize() must destroy some objects
1014 tracker.ResetCopiesMovesSwaps();
1015 v.resize(len, Instance(100));
1016 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1017 EXPECT_EQ(tracker.copies(), 0);
1018 EXPECT_EQ(tracker.moves(), 0);
1019
1020 // reserve() must not increase the number of initialized objects
1021 SCOPED_TRACE("reserve");
1022 v.reserve(len + 1000);
1023 EXPECT_EQ(tracker.instances(), static_cast<int>(len));
1024 EXPECT_EQ(tracker.copies() + tracker.moves(), static_cast<int>(len));
1025
1026 // pop_back() and erase() must destroy one object
1027 if (len > 0) {
1028 tracker.ResetCopiesMovesSwaps();
1029 v.pop_back();
1030 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 1);
1031 EXPECT_EQ(tracker.copies(), 0);
1032 EXPECT_EQ(tracker.moves(), 0);
1033
1034 if (!v.empty()) {
1035 tracker.ResetCopiesMovesSwaps();
1036 v.erase(v.begin());
1037 EXPECT_EQ(tracker.instances(), static_cast<int>(len) - 2);
1038 EXPECT_EQ(tracker.copies() + tracker.moves(),
1039 static_cast<int>(len) - 2);
1040 }
1041 }
1042
1043 tracker.ResetCopiesMovesSwaps();
1044 int instances_before_empty_erase = tracker.instances();
1045 v.erase(v.begin(), v.begin());
1046 EXPECT_EQ(tracker.instances(), instances_before_empty_erase);
1047 EXPECT_EQ(tracker.copies() + tracker.moves(), 0);
1048 }
1049 }
1050
1051 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) {
1052 using Instance = TypeParam;
1053 using InstanceVec = absl::InlinedVector<Instance, 8>;
1054 InstanceTracker tracker;
1055 for (int len = 0; len < 20; len++) {
1056 SCOPED_TRACE(len);
1057 tracker.ResetCopiesMovesSwaps();
1058
1059 InstanceVec v;
1060 for (int i = 0; i < len; i++) {
1061 v.push_back(Instance(i));
1062 }
1063 EXPECT_EQ(tracker.instances(), len);
1064 EXPECT_GE(tracker.copies() + tracker.moves(),
1065 len); // More due to reallocation.
1066 tracker.ResetCopiesMovesSwaps();
1067 { // Copy constructor should create 'len' more instances.
1068 InstanceVec v_copy(v);
1069 EXPECT_EQ(tracker.instances(), len + len);
1070 EXPECT_EQ(tracker.copies(), len);
1071 EXPECT_EQ(tracker.moves(), 0);
1072 }
1073 EXPECT_EQ(tracker.instances(), len);
1074 }
1075 }
1076
1077 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) {
1078 using Instance = TypeParam;
1079 using InstanceVec = absl::InlinedVector<Instance, 8>;
1080 InstanceTracker tracker;
1081 for (int len = 0; len < 20; len++) {
1082 SCOPED_TRACE(len);
1083 tracker.ResetCopiesMovesSwaps();
1084
1085 InstanceVec v;
1086 const size_t inlined_capacity = v.capacity();
1087 for (int i = 0; i < len; i++) {
1088 v.push_back(Instance(i));
1089 }
1090 EXPECT_EQ(tracker.instances(), len);
1091 EXPECT_GE(tracker.copies() + tracker.moves(),
1092 len); // More due to reallocation.
1093 tracker.ResetCopiesMovesSwaps();
1094 {
1095 InstanceVec v_copy(std::move(v));
1096 if (static_cast<size_t>(len) > inlined_capacity) {
1097 // Allocation is moved as a whole.
1098 EXPECT_EQ(tracker.instances(), len);
1099 EXPECT_EQ(tracker.live_instances(), len);
1100 // Tests an implementation detail, don't rely on this in your code.
1101 EXPECT_EQ(v.size(), 0u); // NOLINT misc-use-after-move
1102 EXPECT_EQ(tracker.copies(), 0);
1103 EXPECT_EQ(tracker.moves(), 0);
1104 } else {
1105 EXPECT_EQ(tracker.instances(), len + len);
1106 if (Instance::supports_move()) {
1107 EXPECT_EQ(tracker.live_instances(), len);
1108 EXPECT_EQ(tracker.copies(), 0);
1109 EXPECT_EQ(tracker.moves(), len);
1110 } else {
1111 EXPECT_EQ(tracker.live_instances(), len + len);
1112 EXPECT_EQ(tracker.copies(), len);
1113 EXPECT_EQ(tracker.moves(), 0);
1114 }
1115 }
1116 EXPECT_EQ(tracker.swaps(), 0);
1117 }
1118 }
1119 }
1120
1121 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) {
1122 using Instance = TypeParam;
1123 using InstanceVec = absl::InlinedVector<Instance, 8>;
1124 InstanceTracker tracker;
1125 for (int len = 0; len < 20; len++) {
1126 SCOPED_TRACE(len);
1127 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1128 SCOPED_TRACE(longorshort);
1129 tracker.ResetCopiesMovesSwaps();
1130
1131 InstanceVec longer, shorter;
1132 for (int i = 0; i < len; i++) {
1133 longer.push_back(Instance(i));
1134 shorter.push_back(Instance(i));
1135 }
1136 longer.push_back(Instance(len));
1137 EXPECT_EQ(tracker.instances(), len + len + 1);
1138 EXPECT_GE(tracker.copies() + tracker.moves(),
1139 len + len + 1); // More due to reallocation.
1140
1141 tracker.ResetCopiesMovesSwaps();
1142 if (longorshort) {
1143 shorter = longer;
1144 EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1));
1145 EXPECT_GE(tracker.copies() + tracker.moves(),
1146 len + 1); // More due to reallocation.
1147 } else {
1148 longer = shorter;
1149 EXPECT_EQ(tracker.instances(), len + len);
1150 EXPECT_EQ(tracker.copies() + tracker.moves(), len);
1151 }
1152 }
1153 }
1154 }
1155
1156 TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) {
1157 using Instance = TypeParam;
1158 using InstanceVec = absl::InlinedVector<Instance, 8>;
1159 InstanceTracker tracker;
1160 for (int len = 0; len < 20; len++) {
1161 SCOPED_TRACE(len);
1162 for (int longorshort = 0; longorshort <= 1; ++longorshort) {
1163 SCOPED_TRACE(longorshort);
1164 tracker.ResetCopiesMovesSwaps();
1165
1166 InstanceVec longer, shorter;
1167 const size_t inlined_capacity = longer.capacity();
1168 for (int i = 0; i < len; i++) {
1169 longer.push_back(Instance(i));
1170 shorter.push_back(Instance(i));
1171 }
1172 longer.push_back(Instance(len));
1173 EXPECT_EQ(tracker.instances(), len + len + 1);
1174 EXPECT_GE(tracker.copies() + tracker.moves(),
1175 len + len + 1); // More due to reallocation.
1176
1177 tracker.ResetCopiesMovesSwaps();
1178 int src_len;
1179 if (longorshort) {
1180 src_len = len + 1;
1181 shorter = std::move(longer);
1182 } else {
1183 src_len = len;
1184 longer = std::move(shorter);
1185 }
1186 if (static_cast<size_t>(src_len) > inlined_capacity) {
1187 // Allocation moved as a whole.
1188 EXPECT_EQ(tracker.instances(), src_len);
1189 EXPECT_EQ(tracker.live_instances(), src_len);
1190 EXPECT_EQ(tracker.copies(), 0);
1191 EXPECT_EQ(tracker.moves(), 0);
1192 } else {
1193 // Elements are all copied.
1194 EXPECT_EQ(tracker.instances(), src_len + src_len);
1195 if (Instance::supports_move()) {
1196 EXPECT_EQ(tracker.copies(), 0);
1197 EXPECT_EQ(tracker.moves(), src_len);
1198 EXPECT_EQ(tracker.live_instances(), src_len);
1199 } else {
1200 EXPECT_EQ(tracker.copies(), src_len);
1201 EXPECT_EQ(tracker.moves(), 0);
1202 EXPECT_EQ(tracker.live_instances(), src_len + src_len);
1203 }
1204 }
1205 EXPECT_EQ(tracker.swaps(), 0);
1206 }
1207 }
1208 }
1209
1210 TEST(CountElemAssign, SimpleTypeWithInlineBacking) {
1211 const size_t inlined_capacity = absl::InlinedVector<int, 2>().capacity();
1212
1213 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1214 SCOPED_TRACE(original_size);
1215 // Original contents are [12345, 12345, ...]
1216 std::vector<int> original_contents(original_size, 12345);
1217
1218 absl::InlinedVector<int, 2> v(original_contents.begin(),
1219 original_contents.end());
1220 v.assign(2, 123);
1221 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(123, 123)));
1222 if (original_size <= inlined_capacity) {
1223 // If the original had inline backing, it should stay inline.
1224 EXPECT_EQ(v.capacity(), inlined_capacity);
1225 }
1226 }
1227 }
1228
1229 TEST(CountElemAssign, SimpleTypeWithAllocation) {
1230 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1231 SCOPED_TRACE(original_size);
1232 // Original contents are [12345, 12345, ...]
1233 std::vector<int> original_contents(original_size, 12345);
1234
1235 absl::InlinedVector<int, 2> v(original_contents.begin(),
1236 original_contents.end());
1237 v.assign(3, 123);
1238 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(123, 123, 123)));
1239 EXPECT_LE(v.size(), v.capacity());
1240 }
1241 }
1242
1243 TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) {
1244 using Instance = TypeParam;
1245 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1246 SCOPED_TRACE(original_size);
1247 // Original contents are [12345, 12345, ...]
1248 std::vector<Instance> original_contents(original_size, Instance(12345));
1249
1250 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1251 original_contents.end());
1252 v.assign(2, Instance(123));
1253 EXPECT_THAT(v, AllOf(SizeIs(2u), ElementsAre(ValueIs(123), ValueIs(123))));
1254 if (original_size <= 2) {
1255 // If the original had inline backing, it should stay inline.
1256 EXPECT_EQ(2u, v.capacity());
1257 }
1258 }
1259 }
1260
1261 template <typename Instance>
1262 void InstanceCountElemAssignWithAllocationTest() {
1263 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1264 SCOPED_TRACE(original_size);
1265 // Original contents are [12345, 12345, ...]
1266 std::vector<Instance> original_contents(original_size, Instance(12345));
1267
1268 absl::InlinedVector<Instance, 2> v(original_contents.begin(),
1269 original_contents.end());
1270 v.assign(3, Instance(123));
1271 EXPECT_THAT(v, AllOf(SizeIs(3u), ElementsAre(ValueIs(123), ValueIs(123),
1272 ValueIs(123))));
1273 EXPECT_LE(v.size(), v.capacity());
1274 }
1275 }
1276 TEST(CountElemAssign, WithAllocationCopyableInstance) {
1277 InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>();
1278 }
1279 TEST(CountElemAssign, WithAllocationCopyableMovableInstance) {
1280 InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>();
1281 }
1282
1283 TEST(RangedConstructor, SimpleType) {
1284 std::vector<int> source_v = {4, 5, 6};
1285 // First try to fit in inline backing
1286 absl::InlinedVector<int, 4> v(source_v.begin(), source_v.end());
1287 EXPECT_EQ(3u, v.size());
1288 EXPECT_EQ(4u,
1289 v.capacity()); // Indication that we're still on inlined storage
1290 EXPECT_EQ(4, v[0]);
1291 EXPECT_EQ(5, v[1]);
1292 EXPECT_EQ(6, v[2]);
1293
1294 // Now, force a re-allocate
1295 absl::InlinedVector<int, 2> realloc_v(source_v.begin(), source_v.end());
1296 EXPECT_EQ(3u, realloc_v.size());
1297 EXPECT_LT(2u, realloc_v.capacity());
1298 EXPECT_EQ(4, realloc_v[0]);
1299 EXPECT_EQ(5, realloc_v[1]);
1300 EXPECT_EQ(6, realloc_v[2]);
1301 }
1302
1303 // Test for ranged constructors using Instance as the element type and
1304 // SourceContainer as the source container type.
1305 template <typename Instance, typename SourceContainer, int inlined_capacity>
1306 void InstanceRangedConstructorTestForContainer() {
1307 InstanceTracker tracker;
1308 SourceContainer source_v = {Instance(0), Instance(1)};
1309 tracker.ResetCopiesMovesSwaps();
1310 absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(),
1311 source_v.end());
1312 EXPECT_EQ(2u, v.size());
1313 EXPECT_LT(1u, v.capacity());
1314 EXPECT_EQ(0, v[0].value());
1315 EXPECT_EQ(1, v[1].value());
1316 EXPECT_EQ(tracker.copies(), 2);
1317 EXPECT_EQ(tracker.moves(), 0);
1318 }
1319
1320 template <typename Instance, int inlined_capacity>
1321 void InstanceRangedConstructorTestWithCapacity() {
1322 // Test with const and non-const, random access and non-random-access sources.
1323 // TODO(bsamwel): Test with an input iterator source.
1324 {
1325 SCOPED_TRACE("std::list");
1326 InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>,
1327 inlined_capacity>();
1328 {
1329 SCOPED_TRACE("const std::list");
1330 InstanceRangedConstructorTestForContainer<
1331 Instance, const std::list<Instance>, inlined_capacity>();
1332 }
1333 {
1334 SCOPED_TRACE("std::vector");
1335 InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>,
1336 inlined_capacity>();
1337 }
1338 {
1339 SCOPED_TRACE("const std::vector");
1340 InstanceRangedConstructorTestForContainer<
1341 Instance, const std::vector<Instance>, inlined_capacity>();
1342 }
1343 }
1344 }
1345
1346 TYPED_TEST_P(InstanceTest, RangedConstructor) {
1347 using Instance = TypeParam;
1348 SCOPED_TRACE("capacity=1");
1349 InstanceRangedConstructorTestWithCapacity<Instance, 1>();
1350 SCOPED_TRACE("capacity=2");
1351 InstanceRangedConstructorTestWithCapacity<Instance, 2>();
1352 }
1353
1354 TEST(RangedConstructor, ElementsAreConstructed) {
1355 std::vector<std::string> source_v = {"cat", "dog"};
1356
1357 // Force expansion and re-allocation of v. Ensures that when the vector is
1358 // expanded that new elements are constructed.
1359 absl::InlinedVector<std::string, 1> v(source_v.begin(), source_v.end());
1360 EXPECT_EQ("cat", v[0]);
1361 EXPECT_EQ("dog", v[1]);
1362 }
1363
1364 TEST(RangedAssign, SimpleType) {
1365 const size_t inlined_capacity = absl::InlinedVector<int, 3>().capacity();
1366
1367 // Test for all combinations of original sizes (empty and non-empty inline,
1368 // and out of line) and target sizes.
1369 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1370 SCOPED_TRACE(original_size);
1371 // Original contents are [12345, 12345, ...]
1372 std::vector<int> original_contents(original_size, 12345);
1373
1374 for (size_t target_size = 0; target_size <= 5; ++target_size) {
1375 SCOPED_TRACE(target_size);
1376
1377 // New contents are [3, 4, ...]
1378 std::vector<int> new_contents;
1379 for (size_t i = 0; i < target_size; ++i) {
1380 new_contents.push_back(static_cast<int>(i + 3));
1381 }
1382
1383 absl::InlinedVector<int, 3> v(original_contents.begin(),
1384 original_contents.end());
1385 v.assign(new_contents.begin(), new_contents.end());
1386
1387 EXPECT_EQ(new_contents.size(), v.size());
1388 EXPECT_LE(new_contents.size(), v.capacity());
1389 if (target_size <= inlined_capacity &&
1390 original_size <= inlined_capacity) {
1391 // Storage should stay inline when target size is small.
1392 EXPECT_EQ(v.capacity(), inlined_capacity);
1393 }
1394 EXPECT_THAT(v, ElementsAreArray(new_contents));
1395 }
1396 }
1397 }
1398
1399 // Returns true if lhs and rhs have the same value.
1400 template <typename Instance>
1401 static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) {
1402 return lhs.value() == rhs.value();
1403 }
1404
1405 // Test for ranged assign() using Instance as the element type and
1406 // SourceContainer as the source container type.
1407 template <typename Instance, typename SourceContainer>
1408 void InstanceRangedAssignTestForContainer() {
1409 // Test for all combinations of original sizes (empty and non-empty inline,
1410 // and out of line) and target sizes.
1411 for (size_t original_size = 0; original_size <= 5; ++original_size) {
1412 SCOPED_TRACE(original_size);
1413 // Original contents are [12345, 12345, ...]
1414 std::vector<Instance> original_contents(original_size, Instance(12345));
1415
1416 for (size_t target_size = 0; target_size <= 5; ++target_size) {
1417 SCOPED_TRACE(target_size);
1418
1419 // New contents are [3, 4, ...]
1420 // Generate data using a non-const container, because SourceContainer
1421 // itself may be const.
1422 // TODO(bsamwel): Test with an input iterator.
1423 std::vector<Instance> new_contents_in;
1424 for (size_t i = 0; i < target_size; ++i) {
1425 new_contents_in.push_back(Instance(static_cast<int>(i) + 3));
1426 }
1427 SourceContainer new_contents(new_contents_in.begin(),
1428 new_contents_in.end());
1429
1430 absl::InlinedVector<Instance, 3> v(original_contents.begin(),
1431 original_contents.end());
1432 v.assign(new_contents.begin(), new_contents.end());
1433
1434 EXPECT_EQ(new_contents.size(), v.size());
1435 EXPECT_LE(new_contents.size(), v.capacity());
1436 if (target_size <= 3 && original_size <= 3) {
1437 // Storage should stay inline when target size is small.
1438 EXPECT_EQ(3u, v.capacity());
1439 }
1440 EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(),
1441 InstanceValuesEqual<Instance>));
1442 }
1443 }
1444 }
1445
1446 TYPED_TEST_P(InstanceTest, RangedAssign) {
1447 using Instance = TypeParam;
1448 // Test with const and non-const, random access and non-random-access sources.
1449 // TODO(bsamwel): Test with an input iterator source.
1450 SCOPED_TRACE("std::list");
1451 InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>();
1452 SCOPED_TRACE("const std::list");
1453 InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>();
1454 SCOPED_TRACE("std::vector");
1455 InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>();
1456 SCOPED_TRACE("const std::vector");
1457 InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>();
1458 }
1459
1460 TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) {
1461 EXPECT_THAT((absl::InlinedVector<int, 4>{4, 5, 6}),
1462 AllOf(SizeIs(3u), CapacityIs(4u), ElementsAre(4, 5, 6)));
1463 }
1464
1465 TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) {
1466 EXPECT_THAT((absl::InlinedVector<int, 2>{4, 5, 6}),
1467 AllOf(SizeIs(3u), CapacityIs(Gt(2u)), ElementsAre(4, 5, 6)));
1468 }
1469
1470 TEST(InitializerListConstructor, DisparateTypesInList) {
1471 EXPECT_THAT((absl::InlinedVector<int, 2>{-7, 8ULL}), ElementsAre(-7, 8));
1472
1473 EXPECT_THAT((absl::InlinedVector<std::string, 2>{"foo", std::string("bar")}),
1474 ElementsAre("foo", "bar"));
1475 }
1476
1477 TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) {
1478 const size_t inlined_capacity =
1479 absl::InlinedVector<CopyableMovableInstance, 1>().capacity();
1480 EXPECT_THAT(
1481 (absl::InlinedVector<CopyableMovableInstance, 1>{
1482 CopyableMovableInstance(0)}),
1483 AllOf(SizeIs(1u), CapacityIs(inlined_capacity), ElementsAre(ValueIs(0))));
1484 }
1485
1486 TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) {
1487 EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{
1488 CopyableMovableInstance(0), CopyableMovableInstance(1)}),
1489 AllOf(SizeIs(2u), CapacityIs(Gt(1u)),
1490 ElementsAre(ValueIs(0), ValueIs(1))));
1491 }
1492
1493 TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) {
1494 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1495 SCOPED_TRACE(original_size);
1496
1497 absl::InlinedVector<int, 2> v1(original_size, 12345);
1498 const size_t original_capacity_v1 = v1.capacity();
1499 v1.assign({3});
1500 EXPECT_THAT(v1, AllOf(SizeIs(1u), CapacityIs(original_capacity_v1),
1501 ElementsAre(3)));
1502
1503 absl::InlinedVector<int, 2> v2(original_size, 12345);
1504 const size_t original_capacity_v2 = v2.capacity();
1505 v2 = {3};
1506 EXPECT_THAT(v2, AllOf(SizeIs(1u), CapacityIs(original_capacity_v2),
1507 ElementsAre(3)));
1508 }
1509 }
1510
1511 TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) {
1512 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1513 SCOPED_TRACE(original_size);
1514 absl::InlinedVector<int, 2> v1(original_size, 12345);
1515 v1.assign({3, 4, 5});
1516 EXPECT_THAT(v1, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1517 EXPECT_LE(3u, v1.capacity());
1518
1519 absl::InlinedVector<int, 2> v2(original_size, 12345);
1520 v2 = {3, 4, 5};
1521 EXPECT_THAT(v2, AllOf(SizeIs(3u), ElementsAre(3, 4, 5)));
1522 EXPECT_LE(3u, v2.capacity());
1523 }
1524 }
1525
1526 TEST(InitializerListAssign, DisparateTypesInList) {
1527 absl::InlinedVector<int, 2> v_int1;
1528 v_int1.assign({-7, 8ULL});
1529 EXPECT_THAT(v_int1, ElementsAre(-7, 8));
1530
1531 absl::InlinedVector<int, 2> v_int2;
1532 v_int2 = {-7, 8ULL};
1533 EXPECT_THAT(v_int2, ElementsAre(-7, 8));
1534
1535 absl::InlinedVector<std::string, 2> v_string1;
1536 v_string1.assign({"foo", std::string("bar")});
1537 EXPECT_THAT(v_string1, ElementsAre("foo", "bar"));
1538
1539 absl::InlinedVector<std::string, 2> v_string2;
1540 v_string2 = {"foo", std::string("bar")};
1541 EXPECT_THAT(v_string2, ElementsAre("foo", "bar"));
1542 }
1543
1544 TYPED_TEST_P(InstanceTest, InitializerListAssign) {
1545 using Instance = TypeParam;
1546 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1547 SCOPED_TRACE(original_size);
1548 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1549 const size_t original_capacity = v.capacity();
1550 v.assign({Instance(3)});
1551 EXPECT_THAT(v, AllOf(SizeIs(1u), CapacityIs(original_capacity),
1552 ElementsAre(ValueIs(3))));
1553 }
1554 for (size_t original_size = 0; original_size <= 4; ++original_size) {
1555 SCOPED_TRACE(original_size);
1556 absl::InlinedVector<Instance, 2> v(original_size, Instance(12345));
1557 v.assign({Instance(3), Instance(4), Instance(5)});
1558 EXPECT_THAT(
1559 v, AllOf(SizeIs(3u), ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5))));
1560 EXPECT_LE(3u, v.capacity());
1561 }
1562 }
1563
1564 REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
1565 CountConstructorsDestructorsOnCopyConstruction,
1566 CountConstructorsDestructorsOnMoveConstruction,
1567 CountConstructorsDestructorsOnAssignment,
1568 CountConstructorsDestructorsOnMoveAssignment,
1569 CountElemAssignInlineBacking, RangedConstructor,
1570 RangedAssign, InitializerListAssign);
1571
1572 using InstanceTypes =
1573 ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
1574 INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
1575 InstanceTypes);
1576
1577 TEST(DynamicVec, DynamicVecCompiles) {
1578 DynamicVec v;
1579 (void)v;
1580 }
1581
1582 TEST(AllocatorSupportTest, Constructors) {
1583 using MyAlloc = CountingAllocator<int>;
1584 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1585 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1586 int64_t allocated = 0;
1587 MyAlloc alloc(&allocated);
1588 { AllocVec ABSL_ATTRIBUTE_UNUSED v; }
1589 { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); }
1590 { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); }
1591 { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); }
1592
1593 AllocVec v2;
1594 { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); }
1595 { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); }
1596 }
1597
1598 TEST(AllocatorSupportTest, CountAllocations) {
1599 using MyAlloc = CountingAllocator<int>;
1600 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1601 const int ia[] = {0, 1, 2, 3, 4, 5, 6, 7};
1602 int64_t allocated = 0;
1603 MyAlloc alloc(&allocated);
1604 {
1605 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc);
1606 EXPECT_THAT(allocated, Eq(0));
1607 }
1608 EXPECT_THAT(allocated, Eq(0));
1609 {
1610 AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc);
1611 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
1612 }
1613 EXPECT_THAT(allocated, Eq(0));
1614 {
1615 AllocVec v(4, 1, alloc);
1616 EXPECT_THAT(allocated, Eq(0));
1617
1618 int64_t allocated2 = 0;
1619 MyAlloc alloc2(&allocated2);
1620 AllocVec v2(v, alloc2);
1621 EXPECT_THAT(allocated2, Eq(0));
1622
1623 int64_t allocated3 = 0;
1624 MyAlloc alloc3(&allocated3);
1625 AllocVec v3(std::move(v), alloc3);
1626 EXPECT_THAT(allocated3, Eq(0));
1627 }
1628 EXPECT_THAT(allocated, 0);
1629 {
1630 AllocVec v(8, 2, alloc);
1631 EXPECT_THAT(allocated, Eq(static_cast<int64_t>(v.size() * sizeof(int))));
1632
1633 int64_t allocated2 = 0;
1634 MyAlloc alloc2(&allocated2);
1635 AllocVec v2(v, alloc2);
1636 EXPECT_THAT(allocated2, Eq(static_cast<int64_t>(v2.size() * sizeof(int))));
1637
1638 int64_t allocated3 = 0;
1639 MyAlloc alloc3(&allocated3);
1640 AllocVec v3(std::move(v), alloc3);
1641 EXPECT_THAT(allocated3, Eq(static_cast<int64_t>(v3.size() * sizeof(int))));
1642 }
1643 EXPECT_EQ(allocated, 0);
1644 {
1645 // Test shrink_to_fit deallocations.
1646 AllocVec v(8, 2, alloc);
1647 EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
1648 v.resize(5);
1649 EXPECT_EQ(allocated, static_cast<int64_t>(8 * sizeof(int)));
1650 v.shrink_to_fit();
1651 EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
1652 v.resize(4);
1653 EXPECT_EQ(allocated, static_cast<int64_t>(5 * sizeof(int)));
1654 v.shrink_to_fit();
1655 EXPECT_EQ(allocated, 0);
1656 }
1657 }
1658
1659 TEST(AllocatorSupportTest, SwapBothAllocated) {
1660 using MyAlloc = CountingAllocator<int>;
1661 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1662 int64_t allocated1 = 0;
1663 int64_t allocated2 = 0;
1664 {
1665 const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
1666 const int ia2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
1667 MyAlloc a1(&allocated1);
1668 MyAlloc a2(&allocated2);
1669 AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
1670 AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
1671 EXPECT_LT(v1.capacity(), v2.capacity());
1672 EXPECT_THAT(allocated1,
1673 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1674 EXPECT_THAT(allocated2,
1675 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1676 v1.swap(v2);
1677 EXPECT_THAT(v1, ElementsAreArray(ia2));
1678 EXPECT_THAT(v2, ElementsAreArray(ia1));
1679 EXPECT_THAT(allocated1,
1680 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1681 EXPECT_THAT(allocated2,
1682 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1683 }
1684 EXPECT_THAT(allocated1, 0);
1685 EXPECT_THAT(allocated2, 0);
1686 }
1687
TEST(AllocatorSupportTest,SwapOneAllocated)1688 TEST(AllocatorSupportTest, SwapOneAllocated) {
1689 using MyAlloc = CountingAllocator<int>;
1690 using AllocVec = absl::InlinedVector<int, 4, MyAlloc>;
1691 int64_t allocated1 = 0;
1692 int64_t allocated2 = 0;
1693 {
1694 const int ia1[] = {0, 1, 2, 3, 4, 5, 6, 7};
1695 const int ia2[] = {0, 1, 2, 3};
1696 MyAlloc a1(&allocated1);
1697 MyAlloc a2(&allocated2);
1698 AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1);
1699 AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2);
1700 EXPECT_THAT(allocated1,
1701 Eq(static_cast<int64_t>(v1.capacity() * sizeof(int))));
1702 EXPECT_THAT(allocated2, Eq(0));
1703 v1.swap(v2);
1704 EXPECT_THAT(v1, ElementsAreArray(ia2));
1705 EXPECT_THAT(v2, ElementsAreArray(ia1));
1706 EXPECT_THAT(allocated1,
1707 Eq(static_cast<int64_t>(v2.capacity() * sizeof(int))));
1708 EXPECT_THAT(allocated2, Eq(0));
1709 EXPECT_TRUE(v2.get_allocator() == a1);
1710 EXPECT_TRUE(v1.get_allocator() == a2);
1711 }
1712 EXPECT_THAT(allocated1, 0);
1713 EXPECT_THAT(allocated2, 0);
1714 }

TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called only once to remain inlined
  inlined_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;

  // MSVC's allocator preemptively allocates in debug mode
#if !defined(_MSC_VER)
  EXPECT_EQ(absl_responsible_for_count, 0);
#endif  // !defined(_MSC_VER)

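  // The scoped_allocator_adaptor propagates the counting allocator to the
  // nested std::vector, so its heap allocation shows up in the byte count.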
  inlined_case[0].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  inlined_case.clear();
  inlined_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
  using StdVector = std::vector<int, CountingAllocator<int>>;
  using Alloc = CountingAllocator<StdVector>;
  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;

  int64_t total_allocated_byte_count = 0;

  AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));

  // Called twice to force into being allocated
  allocated_case.emplace_back();
  allocated_case.emplace_back();

  int64_t absl_responsible_for_count = total_allocated_byte_count;
  EXPECT_GT(absl_responsible_for_count, 0);

  allocated_case[1].emplace_back();
  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);

  allocated_case.clear();
  allocated_case.shrink_to_fit();
  EXPECT_EQ(total_allocated_byte_count, 0);
}

TEST(AllocatorSupportTest, SizeAllocConstructor) {
  constexpr size_t inlined_size = 4;
  using Alloc = CountingAllocator<int>;
  using AllocVec = absl::InlinedVector<int, inlined_size, Alloc>;

  {
    auto len = inlined_size / 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Inline storage used; allocator should not be invoked
    EXPECT_THAT(allocated, Eq(0));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }

  {
    auto len = inlined_size * 2;
    int64_t allocated = 0;
    auto v = AllocVec(len, Alloc(&allocated));

    // Out of line storage used; allocation of 8 elements expected
    EXPECT_THAT(allocated, Eq(static_cast<int64_t>(len * sizeof(int))));
    EXPECT_THAT(v, AllOf(SizeIs(len), Each(0)));
  }
}

TEST(InlinedVectorTest, MinimumAllocatorCompilesUsingTraits) {
  using T = int;
  using A = std::allocator<T>;
  using ATraits = absl::allocator_traits<A>;

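  // An allocator that supplies only value_type, allocate(), and deallocate();
  // allocator_traits fills in the rest of what InlinedVector needs.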
  struct MinimumAllocator {
    using value_type = T;

    value_type* allocate(size_t n) {
      A a;
      return ATraits::allocate(a, n);
    }

    void deallocate(value_type* p, size_t n) {
      A a;
      ATraits::deallocate(a, p, n);
    }
  };

  absl::InlinedVector<T, 1, MinimumAllocator> vec;
  vec.emplace_back();
  vec.resize(0);
}

TEST(InlinedVectorTest, AbslHashValueWorks) {
  using V = absl::InlinedVector<int, 4>;
  std::vector<V> cases;

  // Generate a variety of vectors; some of these are small enough for the
  // inline space but are stored out of line.
  for (size_t i = 0; i < 10; ++i) {
    V v;
    for (int j = 0; j < static_cast<int>(i); ++j) {
      v.push_back(j);
    }
    cases.push_back(v);
    v.resize(i % 4);
    cases.push_back(v);
  }

  EXPECT_TRUE(absl::VerifyTypeImplementsAbslHashCorrectly(cases));
}

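// A type that can be move-constructed but not move-assigned. InlinedVector's
// move assignment therefore cannot assign over existing elements; it must
// destroy them and move-construct in place, or simply take over heap storage.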
class MoveConstructibleOnlyInstance
    : public absl::test_internal::BaseCountedInstance {
 public:
  explicit MoveConstructibleOnlyInstance(int x) : BaseCountedInstance(x) {}
  MoveConstructibleOnlyInstance(MoveConstructibleOnlyInstance&& other) =
      default;
  MoveConstructibleOnlyInstance& operator=(
      MoveConstructibleOnlyInstance&& other) = delete;
};

MATCHER(HasValue, "") {
  return ::testing::get<0>(arg).value() == ::testing::get<1>(arg);
}

TEST(NonAssignableMoveAssignmentTest, AllocatedToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  inlined = std::move(allocated);
  // passed ownership of the allocated storage
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonAssignableMoveAssignmentTest, InlineToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  allocated = std::move(inlined);
  // Moved elements
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(allocated, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, InlineToInline) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(1);
  tracker.ResetCopiesMovesSwaps();

  inlined_a = std::move(inlined_b);
  // Moved elements
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 1);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {1}));
}

TEST(NonAssignableMoveAssignmentTest, AllocatedToAllocated) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a = std::move(allocated_b);
  // passed ownership of the allocated storage
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
}

TEST(NonAssignableMoveAssignmentTest, AssignThis) {
  using X = MoveConstructibleOnlyInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  // Obfuscated in order to pass -Wself-move.
  v = std::move(*std::addressof(v));
  // nothing happens
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}

class NonSwappableInstance : public absl::test_internal::BaseCountedInstance {
 public:
  explicit NonSwappableInstance(int x) : BaseCountedInstance(x) {}
  NonSwappableInstance(const NonSwappableInstance& other) = default;
  NonSwappableInstance& operator=(const NonSwappableInstance& other) = default;
  NonSwappableInstance(NonSwappableInstance&& other) = default;
  NonSwappableInstance& operator=(NonSwappableInstance&& other) = default;
};

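// Deleting the ADL-visible swap() makes NonSwappableInstance non-swappable, so
// InlinedVector::swap must fall back to moving elements (or to exchanging heap
// storage), which the tests below observe through the InstanceTracker.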
void swap(NonSwappableInstance&, NonSwappableInstance&) = delete;

TEST(NonSwappableSwapTest, InlineAndAllocatedTransferStorageAndMove) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined;
  inlined.emplace_back(1);
  absl::InlinedVector<X, 2> allocated;
  allocated.emplace_back(1);
  allocated.emplace_back(2);
  allocated.emplace_back(3);
  tracker.ResetCopiesMovesSwaps();

  inlined.swap(allocated);
  EXPECT_EQ(tracker.moves(), 1);
  EXPECT_EQ(tracker.live_instances(), 4);

  EXPECT_THAT(inlined, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonSwappableSwapTest, InlineAndInlineMoveIndividualElements) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> inlined_a;
  inlined_a.emplace_back(1);
  absl::InlinedVector<X, 2> inlined_b;
  inlined_b.emplace_back(2);
  tracker.ResetCopiesMovesSwaps();

  inlined_a.swap(inlined_b);
  EXPECT_EQ(tracker.moves(), 3);
  EXPECT_EQ(tracker.live_instances(), 2);

  EXPECT_THAT(inlined_a, Pointwise(HasValue(), {2}));
  EXPECT_THAT(inlined_b, Pointwise(HasValue(), {1}));
}

TEST(NonSwappableSwapTest, AllocatedAndAllocatedOnlyTransferStorage) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> allocated_a;
  allocated_a.emplace_back(1);
  allocated_a.emplace_back(2);
  allocated_a.emplace_back(3);
  absl::InlinedVector<X, 2> allocated_b;
  allocated_b.emplace_back(4);
  allocated_b.emplace_back(5);
  allocated_b.emplace_back(6);
  allocated_b.emplace_back(7);
  tracker.ResetCopiesMovesSwaps();

  allocated_a.swap(allocated_b);
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 7);

  EXPECT_THAT(allocated_a, Pointwise(HasValue(), {4, 5, 6, 7}));
  EXPECT_THAT(allocated_b, Pointwise(HasValue(), {1, 2, 3}));
}

TEST(NonSwappableSwapTest, SwapThis) {
  using X = NonSwappableInstance;
  InstanceTracker tracker;
  absl::InlinedVector<X, 2> v;
  v.emplace_back(1);
  v.emplace_back(2);
  v.emplace_back(3);

  tracker.ResetCopiesMovesSwaps();

  v.swap(v);
  EXPECT_EQ(tracker.moves(), 0);
  EXPECT_EQ(tracker.live_instances(), 3);

  EXPECT_THAT(v, Pointwise(HasValue(), {1, 2, 3}));
}

template <size_t N>
using CharVec = absl::InlinedVector<char, N>;

// Warning: This struct "simulates" the type `InlinedVector::Storage::Allocated`
// to make reasonable expectations for inlined storage capacity optimization. If
// the implementation of `Allocated` changes, then `MySpan` and the tests that
// use it need to be updated accordingly.
template <typename T>
struct MySpan {
  T* data;
  size_t size;
};
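
// A minimal layout sanity check, not part of the original test suite: it
// assumes `MySpan<char>` packs a pointer and a size_t with no padding, which
// holds on common ABIs and is what the capacity expectations below rely on.
static_assert(sizeof(MySpan<char>) == sizeof(char*) + sizeof(size_t),
              "MySpan<char> is assumed to be exactly a pointer plus a size");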

TEST(StorageTest, InlinedCapacityAutoIncrease) {
  // The requested capacity is automatically increased to
  // `sizeof(MySpan<char>)`.
  EXPECT_GT(CharVec<1>().capacity(), 1);
  EXPECT_EQ(CharVec<1>().capacity(), sizeof(MySpan<char>));
  EXPECT_EQ(CharVec<1>().capacity(), CharVec<2>().capacity());
  EXPECT_EQ(sizeof(CharVec<1>), sizeof(CharVec<2>));

  // The requested capacity is automatically increased to
  // `sizeof(MySpan<int>) / sizeof(int)`.
  EXPECT_GT((absl::InlinedVector<int, 1>().capacity()), 1);
  EXPECT_EQ((absl::InlinedVector<int, 1>().capacity()),
            sizeof(MySpan<int>) / sizeof(int));
}

}  // anonymous namespace