1 // Copyright 2019 The Abseil Authors. 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // 7 // https://www.apache.org/licenses/LICENSE-2.0 8 // 9 // Unless required by applicable law or agreed to in writing, software 10 // distributed under the License is distributed on an "AS IS" BASIS, 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 // See the License for the specific language governing permissions and 13 // limitations under the License. 14 15 #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ 16 #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ 17 18 #include <algorithm> 19 #include <cstddef> 20 #include <cstring> 21 #include <iterator> 22 #include <limits> 23 #include <memory> 24 #include <new> 25 #include <type_traits> 26 #include <utility> 27 28 #include "absl/base/attributes.h" 29 #include "absl/base/macros.h" 30 #include "absl/container/internal/compressed_tuple.h" 31 #include "absl/memory/memory.h" 32 #include "absl/meta/type_traits.h" 33 #include "absl/types/span.h" 34 35 namespace absl { 36 ABSL_NAMESPACE_BEGIN 37 namespace inlined_vector_internal { 38 39 // GCC does not deal very well with the below code 40 #if !defined(__clang__) && defined(__GNUC__) 41 #pragma GCC diagnostic push 42 #pragma GCC diagnostic ignored "-Warray-bounds" 43 #endif 44 45 template <typename A> 46 using AllocatorTraits = std::allocator_traits<A>; 47 template <typename A> 48 using ValueType = typename AllocatorTraits<A>::value_type; 49 template <typename A> 50 using SizeType = typename AllocatorTraits<A>::size_type; 51 template <typename A> 52 using Pointer = typename AllocatorTraits<A>::pointer; 53 template <typename A> 54 using ConstPointer = typename AllocatorTraits<A>::const_pointer; 55 template <typename A> 56 using SizeType = typename AllocatorTraits<A>::size_type; 57 template <typename A> 58 using 
DifferenceType = typename AllocatorTraits<A>::difference_type; 59 template <typename A> 60 using Reference = ValueType<A>&; 61 template <typename A> 62 using ConstReference = const ValueType<A>&; 63 template <typename A> 64 using Iterator = Pointer<A>; 65 template <typename A> 66 using ConstIterator = ConstPointer<A>; 67 template <typename A> 68 using ReverseIterator = typename std::reverse_iterator<Iterator<A>>; 69 template <typename A> 70 using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>; 71 template <typename A> 72 using MoveIterator = typename std::move_iterator<Iterator<A>>; 73 74 template <typename Iterator> 75 using IsAtLeastForwardIterator = std::is_convertible< 76 typename std::iterator_traits<Iterator>::iterator_category, 77 std::forward_iterator_tag>; 78 79 template <typename A> 80 using IsMemcpyOk = 81 absl::conjunction<std::is_same<A, std::allocator<ValueType<A>>>, 82 absl::is_trivially_copy_constructible<ValueType<A>>, 83 absl::is_trivially_copy_assignable<ValueType<A>>, 84 absl::is_trivially_destructible<ValueType<A>>>; 85 86 template <typename A> 87 using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>; 88 template <typename A> 89 using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>; 90 91 template <typename T> 92 struct TypeIdentity { 93 using type = T; 94 }; 95 96 // Used for function arguments in template functions to prevent ADL by forcing 97 // callers to explicitly specify the template parameter. 
template <typename T>
using NoTypeDeduction = typename TypeIdentity<T>::type;

// Destroys `destroy_size` elements starting at `destroy_first`. The second
// template parameter selects a no-op specialization when the value type is
// trivially destructible.
template <typename A, bool IsTriviallyDestructible =
                          absl::is_trivially_destructible<ValueType<A>>::value>
struct DestroyAdapter;

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    // Destroy in reverse order of construction.
    for (SizeType<A> i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits<A>::destroy(allocator, destroy_first + i);
    }
  }
};

template <typename A>
struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> {
  static void DestroyElements(A& allocator, Pointer<A> destroy_first,
                              SizeType<A> destroy_size) {
    // Trivially destructible: nothing to do. Casts silence unused-parameter
    // warnings.
    static_cast<void>(allocator);
    static_cast<void>(destroy_first);
    static_cast<void>(destroy_size);
  }
};

// The result of an allocation request: the heap pointer plus the capacity
// actually obtained.
template <typename A>
struct Allocation {
  Pointer<A> data = nullptr;
  SizeType<A> capacity = 0;
};

// Thin wrapper over allocate/deallocate. `IsOverAligned` distinguishes value
// types whose alignment exceeds the default `operator new` alignment.
template <typename A,
          bool IsOverAligned =
              (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)>
struct MallocAdapter {
  static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) {
    return {AllocatorTraits<A>::allocate(allocator, requested_capacity),
            requested_capacity};
  }

  static void Deallocate(A& allocator, Pointer<A> pointer,
                         SizeType<A> capacity) {
    AllocatorTraits<A>::deallocate(allocator, pointer, capacity);
  }
};

// Constructs `construct_size` elements at `construct_first`, drawing values
// from `values`. Strong exception safety: if a construction throws, all
// previously constructed elements are destroyed before rethrowing.
template <typename A, typename ValueAdapter>
void ConstructElements(NoTypeDeduction<A>& allocator,
                       Pointer<A> construct_first, ValueAdapter& values,
                       SizeType<A> construct_size) {
  for (SizeType<A> i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); }
    ABSL_INTERNAL_CATCH_ANY {
      DestroyAdapter<A>::DestroyElements(allocator, construct_first, i);
      ABSL_INTERNAL_RETHROW;
    }
  }
}

// Assigns `assign_size` values from `values` onto already-constructed
// elements starting at `assign_first`.
template <typename A, typename ValueAdapter>
void AssignElements(Pointer<A> assign_first, ValueAdapter& values,
                    SizeType<A> assign_size) {
  for (SizeType<A> i = 0; i < assign_size; ++i) {
    values.AssignNext(assign_first + i);
  }
}

// A non-owning snapshot of a `Storage`'s data pointer, size, and capacity.
template <typename A>
struct StorageView {
  Pointer<A> data;
  SizeType<A> size;
  SizeType<A> capacity;
};

// Value adapter that pulls successive values from an iterator.
template <typename A, typename Iterator>
class IteratorValueAdapter {
 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *it_);
    ++it_;
  }

  void AssignNext(Pointer<A> assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

// Value adapter that repeatedly yields copies of a single value.
template <typename A>
class CopyValueAdapter {
 public:
  explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at, *ptr_);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; }

 private:
  ConstPointer<A> ptr_;
};

// Value adapter that yields value-initialized elements.
template <typename A>
class DefaultValueAdapter {
 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(A& allocator, Pointer<A> construct_at) {
    AllocatorTraits<A>::construct(allocator, construct_at);
  }

  void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); }
};

// RAII transaction for a single allocation: if the transaction is destroyed
// without `Release()` having been called, the allocation is returned to the
// allocator. Used to roll back on exceptions.
template <typename A>
class AllocationTransaction {
 public:
  explicit AllocationTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), capacity_(0) {}

  ~AllocationTransaction() {
    if (DidAllocate()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }

  // Allocates `requested_capacity` elements and takes ownership of them.
  Pointer<A> Allocate(SizeType<A> requested_capacity) {
    Allocation<A> result =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    GetData() = result.data;
    GetCapacity() = result.capacity;
    return result.data;
  }

  // Commits: hands ownership of the allocation to the caller.
  ABSL_MUST_USE_RESULT Allocation<A> Release() && {
    Allocation<A> result = {GetData(), GetCapacity()};
    Reset();
    return result;
  }

 private:
  void Reset() {
    GetData() = nullptr;
    GetCapacity() = 0;
  }

  // CompressedTuple elides storage for an empty (stateless) allocator.
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> capacity_;
};

// RAII transaction for a range of constructed elements: if the transaction is
// destroyed without `Commit()` having been called, the elements are
// destroyed. Used to roll back on exceptions.
template <typename A>
class ConstructionTransaction {
 public:
  explicit ConstructionTransaction(A& allocator)
      : allocator_data_(allocator, nullptr), size_(0) {}

  ~ConstructionTransaction() {
    if (DidConstruct()) {
      DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize());
    }
  }

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  A& GetAllocator() { return allocator_data_.template get<0>(); }
  Pointer<A>& GetData() { return allocator_data_.template get<1>(); }
  SizeType<A>& GetSize() { return size_; }

  bool DidConstruct() { return GetData() != nullptr; }
  template <typename ValueAdapter>
  void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) {
    ConstructElements<A>(GetAllocator(), data, values, size);
    GetData() = data;
    GetSize() = size;
  }
  // Commits: the elements become the caller's responsibility.
  void Commit() && {
    GetData() = nullptr;
    GetSize() = 0;
  }

 private:
  container_internal::CompressedTuple<A, Pointer<A>> allocator_data_;
  SizeType<A> size_;
};

// The backing store of `InlinedVector`: a union of an inline element buffer
// and a heap allocation, plus metadata packing the size and an is-allocated
// flag into one word (size in the high bits, flag in bit 0).
template <typename T, size_t N, typename A>
class Storage {
 public:
  // Tag types selecting how elements are moved/swapped between storages.
  struct MemcpyPolicy {};
  struct ElementwiseAssignPolicy {};
  struct ElementwiseSwapPolicy {};
  struct ElementwiseConstructPolicy {};

  using MoveAssignmentPolicy = absl::conditional_t<
      IsMemcpyOk<A>::value, MemcpyPolicy,
      absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy,
                          ElementwiseConstructPolicy>>;
  using SwapPolicy = absl::conditional_t<
      IsMemcpyOk<A>::value, MemcpyPolicy,
      absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy,
                          ElementwiseConstructPolicy>>;

  // Geometric growth: double the current capacity.
  static SizeType<A> NextCapacity(SizeType<A> current_capacity) {
    return current_capacity * 2;
  }

  // Grown capacity that is at least `requested_capacity`.
  static SizeType<A> ComputeCapacity(SizeType<A> current_capacity,
                                     SizeType<A> requested_capacity) {
    return (std::max)(NextCapacity(current_capacity), requested_capacity);
  }

  // ---------------------------------------------------------------------------
  // Storage Constructors and Destructor
  // ---------------------------------------------------------------------------

  Storage() : metadata_(A(), /* size and is_allocated */ 0u) {}

  explicit Storage(const A& allocator)
      : metadata_(allocator, /* size and is_allocated */ 0u) {}

  ~Storage() {
    if (GetSizeAndIsAllocated() == 0) {
      // Empty and not allocated; nothing to do.
    } else if (IsMemcpyOk<A>::value) {
      // No destructors need to be run; just deallocate if necessary.
      DeallocateIfAllocated();
    } else {
      DestroyContents();
    }
  }

  // ---------------------------------------------------------------------------
  // Storage Member Accessors
  // ---------------------------------------------------------------------------

  SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const SizeType<A>& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

  // Size lives in the high bits; the low bit is the is-allocated flag.
  SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  Pointer<A> GetAllocatedData() { return data_.allocated.allocated_data; }

  ConstPointer<A> GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  Pointer<A> GetInlinedData() {
    return reinterpret_cast<Pointer<A>>(
        std::addressof(data_.inlined.inlined_data[0]));
  }

  ConstPointer<A> GetInlinedData() const {
    return reinterpret_cast<ConstPointer<A>>(
        std::addressof(data_.inlined.inlined_data[0]));
  }

  SizeType<A> GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  SizeType<A> GetInlinedCapacity() const {
    return static_cast<SizeType<A>>(kOptimalInlinedSize);
  }

  StorageView<A> MakeStorageView() {
    return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(),
                                             GetAllocatedCapacity()}
                            : StorageView<A>{GetInlinedData(), GetSize(),
                                             GetInlinedCapacity()};
  }

  A& GetAllocator() { return metadata_.template get<0>(); }

  const A& GetAllocator() const { return metadata_.template get<0>(); }

  // ---------------------------------------------------------------------------
  // Storage Member Mutators
  // ---------------------------------------------------------------------------

  ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other);

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, SizeType<A> new_size);

  template <typename ValueAdapter>
  Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values,
                     SizeType<A> insert_count);

  template <typename... Args>
  Reference<A> EmplaceBack(Args&&... args);

  Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to);

  void Reserve(SizeType<A> requested_capacity);

  void ShrinkToFit();

  void Swap(Storage* other_storage_ptr);

  void SetIsAllocated() {
    GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1);
  }

  void UnsetIsAllocated() {
    GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1);
  }

  void SetSize(SizeType<A> size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<SizeType<A>>(GetIsAllocated());
  }

  void SetAllocatedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1);
  }

  void SetInlinedSize(SizeType<A> size) {
    GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1);
  }

  // Adding/subtracting a shifted count leaves the is-allocated bit intact.
  void AddSize(SizeType<A> count) {
    GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1);
  }

  void SubtractSize(SizeType<A> count) {
    ABSL_HARDENING_ASSERT(count <= GetSize());

    GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1);
  }

  void SetAllocation(Allocation<A> allocation) {
    data_.allocated.allocated_data = allocation.data;
    data_.allocated.allocated_capacity = allocation.capacity;
  }

  void MemcpyFrom(const Storage& other_storage) {
    // Copying the union as a whole is only valid when elements are memcpy-able
    // or when the source's active member is the (trivial) `Allocated` struct.
    ABSL_HARDENING_ASSERT(IsMemcpyOk<A>::value ||
                          other_storage.GetIsAllocated());

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(),
                                   GetAllocatedCapacity());
    }
  }

 private:
  ABSL_ATTRIBUTE_NOINLINE void DestroyContents();

  using Metadata = container_internal::CompressedTuple<A, SizeType<A>>;

  struct Allocated {
    Pointer<A> allocated_data;
    SizeType<A> allocated_capacity;
  };

  // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the
  // `InlinedVector`. Sometimes, it is possible to increase the capacity (from
  // the user requested `N`) without increasing the size of the `InlinedVector`.
  static constexpr size_t kOptimalInlinedSize =
      (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>));

  struct Inlined {
    alignas(ValueType<A>) char inlined_data[sizeof(
        ValueType<A>[kOptimalInlinedSize])];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n);
  void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n);

  void SwapInlinedElements(MemcpyPolicy, Storage* other);
  template <typename NotMemcpyPolicy>
  void SwapInlinedElements(NotMemcpyPolicy, Storage* other);

  template <typename... Args>
  ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args);

  Metadata metadata_;
  Data data_;
};

// Destroys all elements and releases the heap allocation, if any.
template <typename T, size_t N, typename A>
void Storage<T, N, A>::DestroyContents() {
  Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
  DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize());
  DeallocateIfAllocated();
}

// Copy-initializes this (assumed freshly constructed) storage from `other`.
template <typename T, size_t N, typename A>
void Storage<T, N, A>::InitFrom(const Storage& other) {
  const SizeType<A> n = other.GetSize();
  ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller.
  ConstPointer<A> src;
  Pointer<A> dst;
  if (!other.GetIsAllocated()) {
    dst = GetInlinedData();
    src = other.GetInlinedData();
  } else {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    SetAllocation(allocation);
    dst = allocation.data;
    src = other.GetAllocatedData();
  }
  if (IsMemcpyOk<A>::value) {
    std::memcpy(reinterpret_cast<char*>(dst),
                reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>));
  } else {
    auto values = IteratorValueAdapter<A, ConstPointer<A>>(src);
    ConstructElements<A>(GetAllocator(), dst, values, n);
  }
  GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated();
}

// Fills this (assumed empty) storage with `new_size` values from `values`.
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  // Only callable from constructors!
  ABSL_HARDENING_ASSERT(!GetIsAllocated());
  ABSL_HARDENING_ASSERT(GetSize() == 0);

  Pointer<A> construct_data;
  if (new_size > GetInlinedCapacity()) {
    // Because this is only called from the `InlinedVector` constructors, it's
    // safe to take on the allocation with size `0`. If `ConstructElements(...)`
    // throws, deallocation will be automatically handled by `~Storage()`.
    SizeType<A> requested_capacity =
        ComputeCapacity(GetInlinedCapacity(), new_size);
    Allocation<A> allocation =
        MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity);
    construct_data = allocation.data;
    SetAllocation(allocation);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  ConstructElements<A>(GetAllocator(), construct_data, values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

// Replaces the contents with `new_size` values from `values`, reusing
// existing elements via assignment where possible.
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();

  AllocationTransaction<A> allocation_tx(GetAllocator());

  // Partition the work: which elements get assigned over, which are
  // newly constructed, and which are destroyed.
  absl::Span<ValueType<A>> assign_loop;
  absl::Span<ValueType<A>> construct_loop;
  absl::Span<ValueType<A>> destroy_loop;

  if (new_size > storage_view.capacity) {
    // Growing past capacity: construct everything in a new allocation and
    // destroy all old elements.
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    construct_loop = {allocation_tx.Allocate(requested_capacity), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    // Growing within capacity: assign over existing, construct the tail.
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    // Shrinking: assign over the prefix, destroy the excess.
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  AssignElements<A>(assign_loop.data(), values, assign_loop.size());

  ConstructElements<A>(GetAllocator(), construct_loop.data(), values,
                       construct_loop.size());

  DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(),
                                     destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }

  SetSize(new_size);
}

// Resizes to `new_size`, constructing any new elements from `values`.
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values, SizeType<A> new_size)
    -> void {
  StorageView<A> storage_view = MakeStorageView();
  Pointer<A> const base = storage_view.data;
  const SizeType<A> size = storage_view.size;
  A& alloc = GetAllocator();
  if (new_size <= size) {
    // Destroy extra old elements.
    DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size);
  } else if (new_size <= storage_view.capacity) {
    // Construct new elements in place.
    ConstructElements<A>(alloc, base + size, values, new_size - size);
  } else {
    // Steps:
    //  a. Allocate new backing store.
    //  b. Construct new elements in new backing store.
    //  c. Move existing elements from old backing store to new backing store.
    //  d. Destroy all elements in old backing store.
    // Use transactional wrappers for the first two steps so we can roll
    // back if necessary due to exceptions.
    AllocationTransaction<A> allocation_tx(alloc);
    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    ConstructionTransaction<A> construction_tx(alloc);
    construction_tx.Construct(new_data + size, values, new_size - size);

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        (MoveIterator<A>(base)));
    ConstructElements<A>(alloc, new_data, move_values, size);

    DestroyAdapter<A>::DestroyElements(alloc, base, size);
    std::move(construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());
    SetIsAllocated();
  }
  SetSize(new_size);
}

// Inserts `insert_count` values from `values` before `pos`; returns an
// iterator to the first inserted element.
template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values,
                              SizeType<A> insert_count) -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto insert_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), pos));
  SizeType<A> insert_end_index = insert_index + insert_count;
  SizeType<A> new_size = storage_view.size + insert_count;

  if (new_size > storage_view.capacity) {
    // Slow path: reallocate, building [prefix][inserted][suffix] in the new
    // buffer under transactions so an exception rolls everything back.
    AllocationTransaction<A> allocation_tx(GetAllocator());
    ConstructionTransaction<A> construction_tx(GetAllocator());
    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(storage_view.data));

    SizeType<A> requested_capacity =
        ComputeCapacity(storage_view.capacity, new_size);
    Pointer<A> new_data = allocation_tx.Allocate(requested_capacity);

    construction_tx.Construct(new_data + insert_index, values, insert_count);

    move_construction_tx.Construct(new_data, move_values, insert_index);

    ConstructElements<A>(GetAllocator(), new_data + insert_end_index,
                         move_values, storage_view.size - insert_index);

    DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                       storage_view.size);

    std::move(construction_tx).Commit();
    std::move(move_construction_tx).Commit();
    DeallocateIfAllocated();
    SetAllocation(std::move(allocation_tx).Release());

    SetAllocatedSize(new_size);
    return Iterator<A>(new_data + insert_index);
  } else {
    // In-place path: shift the suffix toward the end (constructing into the
    // uninitialized tail, then move-assigning backwards), then write the
    // inserted values over the vacated slots.
    SizeType<A> move_construction_destination_index =
        (std::max)(insert_end_index, storage_view.size);

    ConstructionTransaction<A> move_construction_tx(GetAllocator());

    IteratorValueAdapter<A, MoveIterator<A>> move_construction_values(
        MoveIterator<A>(storage_view.data +
                        (move_construction_destination_index - insert_count)));
    absl::Span<ValueType<A>> move_construction = {
        storage_view.data + move_construction_destination_index,
        new_size - move_construction_destination_index};

    Pointer<A> move_assignment_values = storage_view.data + insert_index;
    absl::Span<ValueType<A>> move_assignment = {
        storage_view.data + insert_end_index,
        move_construction_destination_index - insert_end_index};

    absl::Span<ValueType<A>> insert_assignment = {move_assignment_values,
                                                  move_construction.size()};

    absl::Span<ValueType<A>> insert_construction = {
        insert_assignment.data() + insert_assignment.size(),
        insert_count - insert_assignment.size()};

    move_construction_tx.Construct(move_construction.data(),
                                   move_construction_values,
                                   move_construction.size());

    // Move-assign the remaining suffix elements backwards so sources are not
    // overwritten before they are read.
    for (Pointer<A>
             destination = move_assignment.data() + move_assignment.size(),
             last_destination = move_assignment.data(),
             source = move_assignment_values + move_assignment.size();
         ;) {
      --destination;
      --source;
      if (destination < last_destination) break;
      *destination = std::move(*source);
    }

    AssignElements<A>(insert_assignment.data(), values,
                      insert_assignment.size());

    ConstructElements<A>(GetAllocator(), insert_construction.data(), values,
                         insert_construction.size());

    std::move(move_construction_tx).Commit();

    AddSize(insert_count);
    return Iterator<A>(storage_view.data + insert_index);
  }
}

// Constructs a new element at the end; returns a reference to it.
template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  const SizeType<A> n = storage_view.size;
  if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) {
    // Fast path; new element fits.
    Pointer<A> last_ptr = storage_view.data + n;
    AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                  std::forward<Args>(args)...);
    AddSize(1);
    return *last_ptr;
  }
  // TODO(b/173712035): Annotate with musttail attribute to prevent regression.
  return EmplaceBackSlow(std::forward<Args>(args)...);
}

// Out-of-line slow path of `EmplaceBack`: grows the backing store first.
template <typename T, size_t N, typename A>
template <typename... Args>
auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> {
  StorageView<A> storage_view = MakeStorageView();
  AllocationTransaction<A> allocation_tx(GetAllocator());
  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));
  SizeType<A> requested_capacity = NextCapacity(storage_view.capacity);
  Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity);
  Pointer<A> last_ptr = construct_data + storage_view.size;

  // Construct new element.
  AllocatorTraits<A>::construct(GetAllocator(), last_ptr,
                                std::forward<Args>(args)...);
  // Move elements from old backing store to new backing store.
  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    AllocatorTraits<A>::destroy(GetAllocator(), last_ptr);
    ABSL_INTERNAL_RETHROW;
  }
  // Destroy elements in old backing store.
  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
  AddSize(1);
  return *last_ptr;
}

// Erases elements in [from, to); returns an iterator to the element after
// the erased range. Remaining elements are shifted down by move-assignment
// and the now-unused tail is destroyed.
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to)
    -> Iterator<A> {
  StorageView<A> storage_view = MakeStorageView();

  auto erase_size = static_cast<SizeType<A>>(std::distance(from, to));
  auto erase_index = static_cast<SizeType<A>>(
      std::distance(ConstIterator<A>(storage_view.data), from));
  SizeType<A> erase_end_index = erase_index + erase_size;

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data + erase_end_index));

  AssignElements<A>(storage_view.data + erase_index, move_values,
                    storage_view.size - erase_end_index);

  DestroyAdapter<A>::DestroyElements(
      GetAllocator(), storage_view.data + (storage_view.size - erase_size),
      erase_size);

  SubtractSize(erase_size);
  return Iterator<A>(storage_view.data + erase_index);
}

// Grows capacity to at least `requested_capacity`; no-op if already enough.
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void {
  StorageView<A> storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  SizeType<A> new_requested_capacity =
      ComputeCapacity(storage_view.capacity, requested_capacity);
  Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity);

  ConstructElements<A>(GetAllocator(), new_data, move_values,
                       storage_view.size);

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  DeallocateIfAllocated();
  SetAllocation(std::move(allocation_tx).Release());
  SetIsAllocated();
}

// Shrinks an allocated storage to fit its size, moving elements back inline
// when they fit in the inlined buffer.
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  ABSL_HARDENING_ASSERT(GetIsAllocated());

  StorageView<A> storage_view{GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return;

  AllocationTransaction<A> allocation_tx(GetAllocator());

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(storage_view.data));

  Pointer<A> construct_data;
  if (storage_view.size > GetInlinedCapacity()) {
    SizeType<A> requested_capacity = storage_view.size;
    construct_data = allocation_tx.Allocate(requested_capacity);
    if (allocation_tx.GetCapacity() >= storage_view.capacity) {
      // Already using the smallest available heap allocation.
      return;
    }
  } else {
    construct_data = GetInlinedData();
  }

  ABSL_INTERNAL_TRY {
    ConstructElements<A>(GetAllocator(), construct_data, move_values,
                         storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    // Writing to the inlined buffer may have clobbered the union's
    // `Allocated` member; restore it before rethrowing.
    SetAllocation({storage_view.data, storage_view.capacity});
    ABSL_INTERNAL_RETHROW;
  }

  DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data,
                                     storage_view.size);

  MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data,
                               storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    SetAllocation(std::move(allocation_tx).Release());
  } else {
    UnsetIsAllocated();
  }
}

// Swaps contents with `*other_storage_ptr`, handling all combinations of
// inlined/allocated state.
template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
  using std::swap;
  ABSL_HARDENING_ASSERT(this != other_storage_ptr);

  if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
    // Both allocated: swap the heap pointers/capacities only.
    swap(data_.allocated, other_storage_ptr->data_.allocated);
  } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
    SwapInlinedElements(SwapPolicy{}, other_storage_ptr);
  } else {
    // Mixed: move the inlined elements into the allocated side's inlined
    // buffer, then hand the heap allocation to the formerly-inlined side.
    Storage* allocated_ptr = this;
    Storage* inlined_ptr = other_storage_ptr;
    if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);

    StorageView<A> allocated_storage_view{
        allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(),
        allocated_ptr->GetAllocatedCapacity()};

    IteratorValueAdapter<A, MoveIterator<A>> move_values(
        MoveIterator<A>(inlined_ptr->GetInlinedData()));

    ABSL_INTERNAL_TRY {
      ConstructElements<A>(inlined_ptr->GetAllocator(),
                           allocated_ptr->GetInlinedData(), move_values,
                           inlined_ptr->GetSize());
    }
    ABSL_INTERNAL_CATCH_ANY {
      allocated_ptr->SetAllocation(Allocation<A>{
          allocated_storage_view.data, allocated_storage_view.capacity});
      ABSL_INTERNAL_RETHROW;
    }

    DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(),
                                       inlined_ptr->GetInlinedData(),
                                       inlined_ptr->GetSize());

    inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data,
                                             allocated_storage_view.capacity});
  }

  swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
  swap(GetAllocator(), other_storage_ptr->GetAllocator());
}

// Swaps the first `n` inlined elements with `other` via element-wise swap.
template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other,
                             SizeType<A> n) {
  std::swap_ranges(GetInlinedData(), GetInlinedData() + n,
                   other->GetInlinedData());
}

// Swaps the first `n` inlined elements with `other` by destroy + reconstruct
// (for types that are not swappable).
template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other,
                             SizeType<A> n) {
  Pointer<A> a = GetInlinedData();
  Pointer<A> b = other->GetInlinedData();
  // see note on allocators in `SwapInlinedElements`.
  A& allocator_a = GetAllocator();
  A& allocator_b = other->GetAllocator();
  for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) {
    ValueType<A> tmp(std::move(*a));

    AllocatorTraits<A>::destroy(allocator_a, a);
    AllocatorTraits<A>::construct(allocator_b, a, std::move(*b));

    AllocatorTraits<A>::destroy(allocator_b, b);
    AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp));
  }
}

// Memcpy-able types: swapping the raw union bytes swaps the elements.
template <typename T, size_t N, typename A>
void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) {
  Data tmp = data_;
  data_ = other->data_;
  other->data_ = tmp;
}

// Non-memcpy types: swap the common prefix, then move-construct the longer
// side's excess into the shorter side and destroy the originals.
template <typename T, size_t N, typename A>
template <typename NotMemcpyPolicy>
void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy,
                                           Storage* other) {
  // Note: `destroy` needs to use pre-swap allocator while `construct` -
  // post-swap allocator. Allocators will be swapped later on outside of
  // `SwapInlinedElements`.
  Storage* small_ptr = this;
  Storage* large_ptr = other;
  if (small_ptr->GetSize() > large_ptr->GetSize()) {
    std::swap(small_ptr, large_ptr);
  }

  auto small_size = small_ptr->GetSize();
  auto diff = large_ptr->GetSize() - small_size;
  SwapN(policy, other, small_size);

  IteratorValueAdapter<A, MoveIterator<A>> move_values(
      MoveIterator<A>(large_ptr->GetInlinedData() + small_size));

  ConstructElements<A>(large_ptr->GetAllocator(),
                       small_ptr->GetInlinedData() + small_size, move_values,
                       diff);

  DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(),
                                     large_ptr->GetInlinedData() + small_size,
                                     diff);
}

// End ignore "array-bounds"
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace inlined_vector_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_