xref: /aosp_15_r20/external/cronet/base/containers/circular_deque.h (revision 6777b5387eb2ff775bb5750e3f5d96f37fb7352b)
1 // Copyright 2017 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
6 #define BASE_CONTAINERS_CIRCULAR_DEQUE_H_
7 
#include <algorithm>
#include <compare>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <type_traits>
#include <utility>
13 
14 #include "base/check.h"
15 #include "base/containers/vector_buffer.h"
16 #include "base/dcheck_is_on.h"
17 #include "base/memory/raw_ptr_exclusion.h"
18 #include "base/ranges/algorithm.h"
19 #include "base/template_util.h"
20 
21 #if DCHECK_IS_ON()
22 #include <ostream>
23 #endif
24 
25 // base::circular_deque is similar to std::deque. Unlike std::deque, the
26 // storage is provided in a flat circular buffer conceptually similar to a
27 // vector. The beginning and end will wrap around as necessary so that
28 // pushes and pops will be constant time as long as a capacity expansion is
29 // not required.
30 //
31 // The API should be identical to std::deque with the following differences:
32 //
33 //  - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
34 //    iterators.
35 //
36 //  - Insertions may resize the vector and so are not constant time (std::deque
37 //    guarantees constant time for insertions at the ends).
38 //
39 //  - Container-wide comparisons are not implemented. If you want to compare
40 //    two containers, use an algorithm so the expensive iteration is explicit.
41 //
42 // If you want a similar container with only a queue API, use base::queue in
43 // base/containers/queue.h.
44 //
45 // Constructors:
46 //   circular_deque();
47 //   circular_deque(size_t count);
48 //   circular_deque(size_t count, const T& value);
49 //   circular_deque(InputIterator first, InputIterator last);
50 //   circular_deque(const circular_deque&);
51 //   circular_deque(circular_deque&&);
52 //   circular_deque(std::initializer_list<value_type>);
53 //
54 // Assignment functions:
55 //   circular_deque& operator=(const circular_deque&);
56 //   circular_deque& operator=(circular_deque&&);
57 //   circular_deque& operator=(std::initializer_list<T>);
58 //   void assign(size_t count, const T& value);
59 //   void assign(InputIterator first, InputIterator last);
60 //   void assign(std::initializer_list<T> value);
61 //
62 // Random accessors:
63 //   T& at(size_t);
64 //   const T& at(size_t) const;
65 //   T& operator[](size_t);
66 //   const T& operator[](size_t) const;
67 //
68 // End accessors:
69 //   T& front();
70 //   const T& front() const;
71 //   T& back();
72 //   const T& back() const;
73 //
74 // Iterator functions:
75 //   iterator               begin();
76 //   const_iterator         begin() const;
77 //   const_iterator         cbegin() const;
78 //   iterator               end();
79 //   const_iterator         end() const;
80 //   const_iterator         cend() const;
81 //   reverse_iterator       rbegin();
82 //   const_reverse_iterator rbegin() const;
83 //   const_reverse_iterator crbegin() const;
84 //   reverse_iterator       rend();
85 //   const_reverse_iterator rend() const;
86 //   const_reverse_iterator crend() const;
87 //
88 // Memory management:
89 //   void reserve(size_t);  // SEE IMPLEMENTATION FOR SOME GOTCHAS.
90 //   size_t capacity() const;
91 //   void shrink_to_fit();
92 //
93 // Size management:
94 //   void clear();
95 //   bool empty() const;
96 //   size_t size() const;
97 //   void resize(size_t);
98 //   void resize(size_t count, const T& value);
99 //
100 // Positional insert and erase:
101 //   void insert(const_iterator pos, size_type count, const T& value);
102 //   void insert(const_iterator pos,
103 //               InputIterator first, InputIterator last);
104 //   iterator insert(const_iterator pos, const T& value);
105 //   iterator insert(const_iterator pos, T&& value);
106 //   iterator emplace(const_iterator pos, Args&&... args);
107 //   iterator erase(const_iterator pos);
108 //   iterator erase(const_iterator first, const_iterator last);
109 //
110 // End insert and erase:
111 //   void push_front(const T&);
112 //   void push_front(T&&);
113 //   void push_back(const T&);
114 //   void push_back(T&&);
115 //   T& emplace_front(Args&&...);
116 //   T& emplace_back(Args&&...);
117 //   void pop_front();
118 //   void pop_back();
119 //
120 // General:
121 //   void swap(circular_deque&);
122 
123 namespace base {
124 
125 template <class T>
126 class circular_deque;
127 
128 namespace internal {
129 
// Start allocating nonempty buffers with this many entries. This is the
// external capacity so the internal buffer will be one larger (= 4) which is
// more even for the allocator. See the descriptions of internal vs. external
// capacity on the comment above the buffer_ variable below.
// (A header-scope constexpr variable is implicitly usable in every TU that
// includes this file without ODR issues.)
constexpr size_t kCircularBufferInitialCapacity = 3;
135 
// Read-only random-access iterator over a circular_deque<T>. Holds a pointer
// back to the parent container plus a *physical* index into its ring buffer;
// logical position is recovered via OffsetFromBegin(). Iterators are NOT
// stable: any mutation of the container invalidates them, which is enforced
// in DCHECK builds via a generation counter.
template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  // A default-constructed iterator is not associated with any container and
  // must not be dereferenced or advanced.
  circular_deque_const_iterator() : parent_deque_(nullptr), index_(0) {
#if DCHECK_IS_ON()
    created_generation_ = 0;
#endif  // DCHECK_IS_ON()
  }

  // Dereferencing.
  const T& operator*() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return parent_deque_->buffer_[index_];
  }
  const T* operator->() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return &parent_deque_->buffer_[index_];
  }
  const value_type& operator[](difference_type i) const { return *(*this + i); }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }

  // Distance is computed on logical offsets, so it is correct even when one
  // iterator's physical index has wrapped past the other's.
  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return static_cast<std::ptrdiff_t>(lhs.OffsetFromBegin() -
                                       rhs.OffsetFromBegin());
  }

  // Comparisons.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend std::strong_ordering operator<=>(
      const circular_deque_const_iterator& lhs,
      const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    // The order is based on the position of the element in the circular_deque
    // rather than `index_` at which the element is stored in the ring buffer.
    return lhs.OffsetFromBegin() <=> rhs.OffsetFromBegin();
  }

 protected:
  friend class circular_deque<T>;

  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : parent_deque_(parent), index_(index) {
#if DCHECK_IS_ON()
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
  size_t OffsetFromBegin() const {
    if (index_ >= parent_deque_->begin_)
      return index_ - parent_deque_->begin_;  // On the same side as begin.
    // Otherwise this iterator's physical index has wrapped past the end of
    // the buffer.
    return parent_deque_->buffer_.capacity() - parent_deque_->begin_ + index_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    index_++;
    if (index_ == parent_deque_->buffer_.capacity())
      index_ = 0;  // Wrap around the physical end of the buffer.
  }
  void Decrement() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndexOrEnd(index_);
    if (index_ == 0)
      index_ = parent_deque_->buffer_.capacity() - 1;
    else
      index_--;
  }
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0)
      parent_deque_->CheckValidIndexOrEnd(index_);
    else
      parent_deque_->CheckValidIndex(index_);
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is empty, so it's important to check for
    // this case explicitly.
    if (delta == 0)
      return;

    difference_type new_offset = OffsetFromBegin() + delta;
    DCHECK(new_offset >= 0 &&
           new_offset <= static_cast<difference_type>(parent_deque_->size()));
    index_ = (new_offset + parent_deque_->begin_) %
             parent_deque_->buffer_.capacity();
  }

#if DCHECK_IS_ON()
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK(created_generation_ == parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK(parent_deque_ == other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK(created_generation_ == other.created_generation_);
  }
#else
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
#endif  // DCHECK_IS_ON()

  // `parent_deque_` is not a raw_ptr<...> for performance reasons: Usually
  // on-stack pointer, pointing back to the collection being iterated, owned by
  // object that iterates over it.  Additionally this is supported by the
  // analysis of sampling profiler data and tab_search:top100:2020.
  RAW_PTR_EXCLUSION const circular_deque<T>* parent_deque_;

  // Physical index into parent_deque_->buffer_, NOT the logical offset from
  // begin() (see OffsetFromBegin()).
  size_t index_;

#if DCHECK_IS_ON()
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_;
#endif  // DCHECK_IS_ON()
};
323 
324 template <typename T>
325 class circular_deque_iterator : public circular_deque_const_iterator<T> {
326   using base = circular_deque_const_iterator<T>;
327 
328  public:
329   friend class circular_deque<T>;
330 
331   using difference_type = std::ptrdiff_t;
332   using value_type = T;
333   using pointer = T*;
334   using reference = T&;
335   using iterator_category = std::random_access_iterator_tag;
336 
337   // Expose the base class' constructor.
circular_deque_iterator()338   circular_deque_iterator() : circular_deque_const_iterator<T>() {}
339 
340   // Dereferencing.
341   T& operator*() const { return const_cast<T&>(base::operator*()); }
342   T* operator->() const { return const_cast<T*>(base::operator->()); }
343   T& operator[](difference_type i) {
344     return const_cast<T&>(base::operator[](i));
345   }
346 
347   // Random access mutation.
348   friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
349                                            difference_type offset) {
350     circular_deque_iterator ret = iter;
351     ret.Add(offset);
352     return ret;
353   }
354   circular_deque_iterator& operator+=(difference_type offset) {
355     base::Add(offset);
356     return *this;
357   }
358   friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
359                                            difference_type offset) {
360     circular_deque_iterator ret = iter;
361     ret.Add(-offset);
362     return ret;
363   }
364   circular_deque_iterator& operator-=(difference_type offset) {
365     base::Add(-offset);
366     return *this;
367   }
368 
369   // Increment and decrement.
370   circular_deque_iterator& operator++() {
371     base::Increment();
372     return *this;
373   }
374   circular_deque_iterator operator++(int) {
375     circular_deque_iterator ret = *this;
376     base::Increment();
377     return ret;
378   }
379   circular_deque_iterator& operator--() {
380     base::Decrement();
381     return *this;
382   }
383   circular_deque_iterator operator--(int) {
384     circular_deque_iterator ret = *this;
385     base::Decrement();
386     return ret;
387   }
388 
389  private:
circular_deque_iterator(const circular_deque<T> * parent,size_t index)390   circular_deque_iterator(const circular_deque<T>* parent, size_t index)
391       : circular_deque_const_iterator<T>(parent, index) {}
392 };
393 
394 }  // namespace internal
395 
396 template <typename T>
397 class circular_deque {
398  private:
399   using VectorBuffer = internal::VectorBuffer<T>;
400 
401  public:
402   using value_type = T;
403   using size_type = std::size_t;
404   using difference_type = std::ptrdiff_t;
405   using reference = value_type&;
406   using const_reference = const value_type&;
407   using pointer = value_type*;
408   using const_pointer = const value_type*;
409 
410   using iterator = internal::circular_deque_iterator<T>;
411   using const_iterator = internal::circular_deque_const_iterator<T>;
412   using reverse_iterator = std::reverse_iterator<iterator>;
413   using const_reverse_iterator = std::reverse_iterator<const_iterator>;
414 
415   // ---------------------------------------------------------------------------
416   // Constructor
417 
  // Default constructor: an empty deque with no allocated buffer.
  constexpr circular_deque() = default;

  // Constructs with |count| copies of |value| or default constructed version.
  explicit circular_deque(size_type count) { resize(count); }
  circular_deque(size_type count, const T& value) { resize(count, value); }

  // Range constructor.
  template <class InputIterator>
  circular_deque(InputIterator first, InputIterator last) {
    assign(first, last);
  }

  // Copy/move.
  // The copy constructor pre-sizes the buffer to size() + 1 (one slot is
  // always wasted to distinguish full from empty; see capacity()) so that
  // assign() performs no further reallocation.
  circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
    assign(other.begin(), other.end());
  }
  // Move constructor steals |other|'s buffer and leaves |other| empty.
  circular_deque(circular_deque&& other) noexcept
      : buffer_(std::move(other.buffer_)),
        begin_(other.begin_),
        end_(other.end_) {
    other.begin_ = 0;
    other.end_ = 0;
  }

  circular_deque(std::initializer_list<value_type> init) { assign(init); }

  // Destroys only the live elements [begin_, end_); the raw storage is owned
  // and released by |buffer_| itself.
  ~circular_deque() { DestructRange(begin_, end_); }
445 
446   // ---------------------------------------------------------------------------
447   // Assignments.
448   //
449   // All of these may invalidate iterators and references.
450 
  // Copy assignment. Grows the buffer up front (reserve only ever expands)
  // so assign() does at most one allocation.
  circular_deque& operator=(const circular_deque& other) {
    if (&other == this)
      return *this;

    reserve(other.size());
    assign(other.begin(), other.end());
    return *this;
  }
  // Move assignment: destroys our elements, then steals |other|'s buffer.
  circular_deque& operator=(circular_deque&& other) noexcept {
    if (&other == this)
      return *this;

    // We're about to overwrite the buffer, so don't free it in clear to
    // avoid doing it twice.
    ClearRetainCapacity();
    buffer_ = std::move(other.buffer_);
    begin_ = other.begin_;
    end_ = other.end_;

    // Leave |other| in a valid empty state.
    other.begin_ = 0;
    other.end_ = 0;

    IncrementGeneration();
    return *this;
  }
  circular_deque& operator=(std::initializer_list<value_type> ilist) {
    reserve(ilist.size());
    assign(std::begin(ilist), std::end(ilist));
    return *this;
  }
481 
  // Replaces the contents with |count| copies of |value|. Existing capacity
  // is retained (and grown if needed).
  void assign(size_type count, const value_type& value) {
    ClearRetainCapacity();
    reserve(count);
    for (size_t i = 0; i < count; i++)
      emplace_back(value);
    IncrementGeneration();
  }

  // This variant should be enabled only when InputIterator is an iterator.
  template <typename InputIterator>
  std::enable_if_t<::base::internal::is_iterator<InputIterator>::value, void>
  assign(InputIterator first, InputIterator last) {
    // Possible future enhancement, dispatch on iterator tag type. For forward
    // iterators we can use std::difference to preallocate the space required
    // and only do one copy.
    ClearRetainCapacity();
    for (; first != last; ++first)
      emplace_back(*first);
    IncrementGeneration();
  }

  // Replaces the contents with the elements of |value|, preallocating once.
  void assign(std::initializer_list<value_type> value) {
    reserve(std::distance(value.begin(), value.end()));
    assign(value.begin(), value.end());
  }
507 
508   // ---------------------------------------------------------------------------
509   // Accessors.
510   //
511   // Since this class assumes no exceptions, at() and operator[] are equivalent.
512 
at(size_type i)513   const value_type& at(size_type i) const {
514     DCHECK(i < size());
515     size_t right_size = buffer_.capacity() - begin_;
516     if (begin_ <= end_ || i < right_size)
517       return buffer_[begin_ + i];
518     return buffer_[i - right_size];
519   }
at(size_type i)520   value_type& at(size_type i) {
521     return const_cast<value_type&>(std::as_const(*this).at(i));
522   }
523 
524   value_type& operator[](size_type i) {
525     return const_cast<value_type&>(std::as_const(*this)[i]);
526   }
527 
528   const value_type& operator[](size_type i) const { return at(i); }
529 
front()530   value_type& front() {
531     DCHECK(!empty());
532     return buffer_[begin_];
533   }
front()534   const value_type& front() const {
535     DCHECK(!empty());
536     return buffer_[begin_];
537   }
538 
back()539   value_type& back() {
540     DCHECK(!empty());
541     return *(--end());
542   }
back()543   const value_type& back() const {
544     DCHECK(!empty());
545     return *(--end());
546   }
547 
548   // ---------------------------------------------------------------------------
549   // Iterators.
550 
begin()551   iterator begin() { return iterator(this, begin_); }
begin()552   const_iterator begin() const { return const_iterator(this, begin_); }
cbegin()553   const_iterator cbegin() const { return const_iterator(this, begin_); }
554 
end()555   iterator end() { return iterator(this, end_); }
end()556   const_iterator end() const { return const_iterator(this, end_); }
cend()557   const_iterator cend() const { return const_iterator(this, end_); }
558 
rbegin()559   reverse_iterator rbegin() { return reverse_iterator(end()); }
rbegin()560   const_reverse_iterator rbegin() const {
561     return const_reverse_iterator(end());
562   }
crbegin()563   const_reverse_iterator crbegin() const { return rbegin(); }
564 
rend()565   reverse_iterator rend() { return reverse_iterator(begin()); }
rend()566   const_reverse_iterator rend() const {
567     return const_reverse_iterator(begin());
568   }
crend()569   const_reverse_iterator crend() const { return rend(); }
570 
571   // ---------------------------------------------------------------------------
572   // Memory management.
573 
574   // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
575   // the buffer when elements are deleted and there is "too much" wasted space.
576   // So if you call reserve() with a large size in anticipation of pushing many
577   // elements, but pop an element before the queue is full, the capacity you
578   // reserved may be lost.
579   //
580   // As a result, it's only worthwhile to call reserve() when you're adding
581   // many things at once with no intermediate operations.
reserve(size_type new_capacity)582   void reserve(size_type new_capacity) {
583     if (new_capacity > capacity())
584       SetCapacityTo(new_capacity);
585   }
586 
capacity()587   size_type capacity() const {
588     // One item is wasted to indicate end().
589     return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
590   }
591 
shrink_to_fit()592   void shrink_to_fit() {
593     if (empty()) {
594       // Optimize empty case to really delete everything if there was
595       // something.
596       if (buffer_.capacity())
597         buffer_ = VectorBuffer();
598     } else {
599       SetCapacityTo(size());
600     }
601   }
602 
603   // ---------------------------------------------------------------------------
604   // Size management.
605 
606   // This will additionally reset the capacity() to 0.
clear()607   void clear() {
608     // This can't resize(0) because that requires a default constructor to
609     // compile, which not all contained classes may implement.
610     ClearRetainCapacity();
611     buffer_ = VectorBuffer();
612   }
613 
empty()614   bool empty() const { return begin_ == end_; }
615 
size()616   size_type size() const {
617     if (begin_ <= end_)
618       return end_ - begin_;
619     return buffer_.capacity() - begin_ + end_;
620   }
621 
  // When reducing size, the elements are deleted from the end. When expanding
  // size, elements are added to the end with |value| or the default
  // constructed version. Even when using resize(count) to shrink, a default
  // constructor is required for the code to compile, even though it will not
  // be called.
  //
  // There are two versions rather than using a default value to avoid
  // creating a temporary when shrinking (when it's not needed). Plus if
  // the default constructor is desired when expanding usually just calling it
  // for each element is faster than making a default-constructed temporary and
  // copying it.
  void resize(size_type count) {
    // SEE BELOW VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      // This could be slightly more efficient but expanding a queue with
      // identical elements is unusual and the extra computations of emplacing
      // one-by-one will typically be small relative to calling the constructor
      // for every item.
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back();
    } else if (count < size()) {
      // count < size() implies a nonempty container, so capacity() > 0 and
      // the modulo below cannot divide by zero.
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
  void resize(size_type count, const value_type& value) {
    // SEE ABOVE VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back(value);
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
667 
668   // ---------------------------------------------------------------------------
669   // Insert and erase.
670   //
671   // Insertion and deletion in the middle is O(n) and invalidates all existing
672   // iterators.
673   //
674   // The implementation of insert isn't optimized as much as it could be. If
675   // the insertion requires that the buffer be grown, it will first be grown
676   // and everything moved, and then the items will be inserted, potentially
// moving some items twice. This simplifies the implementation substantially
678   // and means less generated templatized code. Since this is an uncommon
679   // operation for deques, and already relatively slow, it doesn't seem worth
680   // the benefit to optimize this.
681 
  // Inserts |count| copies of |value| before |pos|. O(n); invalidates all
  // iterators.
  void insert(const_iterator pos, size_type count, const T& value) {
    ValidateIterator(pos);

    // Optimize insert at the beginning.
    if (pos == begin()) {
      ExpandCapacityIfNecessary(count);
      for (size_t i = 0; i < count; i++)
        push_front(value);
      return;
    }

    iterator insert_cur(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(count, &insert_cur, &insert_end);
    // Placement-new copies of |value| into the hole opened by MakeRoomFor.
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(value);
      ++insert_cur;
    }

    IncrementGeneration();
  }
703 
  // Inserts [first, last) before |pos|. O(n); invalidates all iterators.
  //
  // This enable_if keeps this call from getting confused with the (pos, count,
  // value) version when value is an integer.
  template <class InputIterator>
  std::enable_if_t<::base::internal::is_iterator<InputIterator>::value, void>
  insert(const_iterator pos, InputIterator first, InputIterator last) {
    ValidateIterator(pos);

    const difference_type inserted_items_signed = std::distance(first, last);
    if (inserted_items_signed == 0)
      return;  // Can divide by 0 when doing modulo below, so return early.
    // A negative distance means |first|/|last| were passed in the wrong order.
    CHECK(inserted_items_signed > 0);
    const size_type inserted_items =
        static_cast<size_type>(inserted_items_signed);

    // Make a hole to copy the items into.
    iterator insert_cur;
    iterator insert_end;
    if (pos == begin()) {
      // Optimize insert at the beginning, nothing needs to be shifted and the
      // hole is the |inserted_items| block immediately before |begin_|.
      ExpandCapacityIfNecessary(inserted_items);
      insert_end = begin();
      begin_ =
          (begin_ + buffer_.capacity() - inserted_items) % buffer_.capacity();
      insert_cur = begin();
    } else {
      insert_cur = iterator(this, pos.index_);
      MakeRoomFor(inserted_items, &insert_cur, &insert_end);
    }

    // Copy the items.
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(*first);
      ++insert_cur;
      ++first;
    }

    IncrementGeneration();
  }
743 
  // These all return an iterator to the inserted item. Existing iterators will
  // be invalidated.
  iterator insert(const_iterator pos, const T& value) {
    return emplace(pos, value);
  }
  iterator insert(const_iterator pos, T&& value) {
    return emplace(pos, std::move(value));
  }
  // Constructs a new element in place before |pos| from |args|.
  template <class... Args>
  iterator emplace(const_iterator pos, Args&&... args) {
    ValidateIterator(pos);

    // Optimize insert at beginning which doesn't require shifting.
    if (pos == cbegin()) {
      emplace_front(std::forward<Args>(args)...);
      return begin();
    }

    // Do this before we make the new iterators we return.
    IncrementGeneration();

    iterator insert_begin(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(1, &insert_begin, &insert_end);
    new (&buffer_[insert_begin.index_]) T(std::forward<Args>(args)...);

    return insert_begin;
  }
772 
  // Calling erase() won't automatically resize the buffer smaller like resize
  // or the pop functions. Erase is slow and relatively uncommon, and for
  // normal deque usage a pop will normally be done on a regular basis that
  // will prevent excessive buffer usage over long periods of time. It's not
  // worth having the extra code for every template instantiation of erase()
  // to resize capacity downward to a new buffer.
  iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  // Erases [first, last); returns an iterator to the element following the
  // deletion. O(n); invalidates all iterators.
  iterator erase(const_iterator first, const_iterator last) {
    ValidateIterator(first);
    ValidateIterator(last);

    IncrementGeneration();

    // First, call the destructor on the deleted items.
    if (first.index_ == last.index_) {
      // Nothing deleted. Need to return early to avoid falling through to
      // moving items on top of themselves.
      return iterator(this, first.index_);
    } else if (first.index_ < last.index_) {
      // Contiguous range.
      buffer_.DestructRange(&buffer_[first.index_], &buffer_[last.index_]);
    } else {
      // Deleted range wraps around.
      buffer_.DestructRange(&buffer_[first.index_],
                            &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[last.index_]);
    }

    if (first.index_ == begin_) {
      // This deletion is from the beginning. Nothing needs to be copied, only
      // begin_ needs to be updated.
      begin_ = last.index_;
      return iterator(this, last.index_);
    }

    // In an erase operation, the shifted items all move logically to the left,
    // so move them from left-to-right.
    iterator move_src(this, last.index_);
    iterator move_src_end = end();
    iterator move_dest(this, first.index_);
    for (; move_src < move_src_end; move_src++, move_dest++) {
      // One element at a time: the iterators handle index wrap-around, which
      // a single bulk MoveRange over [last, end) could not.
      buffer_.MoveRange(&buffer_[move_src.index_],
                        &buffer_[move_src.index_ + 1],
                        &buffer_[move_dest.index_]);
    }

    end_ = move_dest.index_;

    // Since we did not reallocate and only changed things after the erase
    // element(s), the input iterator's index points to the thing following the
    // deletion.
    return iterator(this, first.index_);
  }
826 
827   // ---------------------------------------------------------------------------
828   // Begin/end operations.
829 
  // Inserts an element at the front (copy and move overloads). Amortized
  // constant time unless a capacity expansion is needed. Invalidates all
  // iterators.
  void push_front(const T& value) { emplace_front(value); }
  void push_front(T&& value) { emplace_front(std::move(value)); }
832 
  // Inserts an element at the back (copy and move overloads). Amortized
  // constant time unless a capacity expansion is needed. Invalidates all
  // iterators.
  void push_back(const T& value) { emplace_back(value); }
  void push_back(T&& value) { emplace_back(std::move(value)); }
835 
  // Constructs an element in place at the front from |args| and returns a
  // reference to it. Invalidates all iterators.
  template <class... Args>
  reference emplace_front(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    // Step begin_ one slot left, wrapping from 0 to the last internal slot.
    if (begin_ == 0)
      begin_ = buffer_.capacity() - 1;
    else
      begin_--;
    IncrementGeneration();
    // Placement-new into the uninitialized slot now at begin_.
    new (&buffer_[begin_]) T(std::forward<Args>(args)...);
    return front();
  }
847 
  // Constructs an element in place at the back from |args| and returns a
  // reference to it. Invalidates all iterators.
  template <class... Args>
  reference emplace_back(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    // Placement-new into the uninitialized one-past-the-end slot, then
    // advance end_, wrapping back to 0 at the end of the internal buffer.
    new (&buffer_[end_]) T(std::forward<Args>(args)...);
    if (end_ == buffer_.capacity() - 1)
      end_ = 0;
    else
      end_++;
    IncrementGeneration();
    return back();
  }
859 
  // Removes the first element. The deque must be non-empty (DCHECKed). May
  // shrink (and thus reallocate) the internal buffer if most of it is wasted.
  void pop_front() {
    DCHECK(size());
    // Destroy exactly the one element at begin_.
    buffer_.DestructRange(&buffer_[begin_], &buffer_[begin_ + 1]);
    begin_++;
    if (begin_ == buffer_.capacity())
      begin_ = 0;

    ShrinkCapacityIfNecessary();

    // When no shrink happens the underlying buffer is stable and popping
    // would technically not invalidate iterators. But ShrinkCapacityIfNecessary
    // above can reallocate, so we uniformly treat pops as invalidating; this
    // keeps the iterator-invalidation contract simple and future-proof.
    IncrementGeneration();
  }
  // Removes the last element. The deque must be non-empty (DCHECKed). May
  // shrink (and thus reallocate) the internal buffer if most of it is wasted.
  void pop_back() {
    DCHECK(size());
    // Step end_ one slot left (wrapping), then destroy the element there.
    if (end_ == 0)
      end_ = buffer_.capacity() - 1;
    else
      end_--;
    buffer_.DestructRange(&buffer_[end_], &buffer_[end_ + 1]);

    ShrinkCapacityIfNecessary();

    // See pop_front comment about why this is here.
    IncrementGeneration();
  }
888 
889   // ---------------------------------------------------------------------------
890   // General operations.
891 
swap(circular_deque & other)892   void swap(circular_deque& other) {
893     std::swap(buffer_, other.buffer_);
894     std::swap(begin_, other.begin_);
895     std::swap(end_, other.end_);
896     IncrementGeneration();
897   }
898 
  // Non-member swap found by ADL so generic code (e.g. std::swap-style
  // callers) picks up the efficient member implementation.
  friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }
900 
901  private:
902   friend internal::circular_deque_iterator<T>;
903   friend internal::circular_deque_const_iterator<T>;
904 
  // Moves the items in the given circular buffer to the current one. The
  // source is moved from so will become invalid. The destination buffer must
  // have already been allocated with enough size.
  //
  // |from_begin| and |from_end| are indices into |from_buf|. On return,
  // |*to_begin| and |*to_end| are the corresponding indices into |to_buf|;
  // the data is always linearized to start at index 0 of the destination.
  static void MoveBuffer(VectorBuffer& from_buf,
                         size_t from_begin,
                         size_t from_end,
                         VectorBuffer* to_buf,
                         size_t* to_begin,
                         size_t* to_end) {
    size_t from_capacity = from_buf.capacity();

    *to_begin = 0;
    if (from_begin < from_end) {
      // Contiguous.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_end],
                         to_buf->begin());
      *to_end = from_end - from_begin;
    } else if (from_begin > from_end) {
      // Discontiguous, copy the right side to the beginning of the new buffer.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_capacity],
                         to_buf->begin());
      size_t right_size = from_capacity - from_begin;
      // Append the left side.
      from_buf.MoveRange(&from_buf[0], &from_buf[from_end],
                         &(*to_buf)[right_size]);
      *to_end = right_size + from_end;
    } else {
      // No items (begin == end means empty by the buffer_ invariant).
      *to_end = 0;
    }
  }
936 
  // Reallocates the buffer to hold exactly |new_capacity| user-visible
  // elements and linearizes the contents into it (begin_ becomes 0). This
  // assumes |new_capacity| is at least the number of elements currently in
  // the deque (it won't call delete on anything).
  void SetCapacityTo(size_t new_capacity) {
    // Use the capacity + 1 as the internal buffer size to differentiate
    // empty and full (see definition of buffer_ below).
    VectorBuffer new_buffer(new_capacity + 1);
    MoveBuffer(buffer_, begin_, end_, &new_buffer, &begin_, &end_);
    buffer_ = std::move(new_buffer);
  }
ExpandCapacityIfNecessary(size_t additional_elts)946   void ExpandCapacityIfNecessary(size_t additional_elts) {
947     size_t min_new_capacity = size() + additional_elts;
948     if (capacity() >= min_new_capacity)
949       return;  // Already enough room.
950 
951     min_new_capacity =
952         std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);
953 
954     // std::vector always grows by at least 50%. WTF::Deque grows by at least
955     // 25%. We expect queue workloads to generally stay at a similar size and
956     // grow less than a vector might, so use 25%.
957     size_t new_capacity =
958         std::max(min_new_capacity, capacity() + capacity() / 4);
959     SetCapacityTo(new_capacity);
960   }
961 
  // Shrinks the buffer after a pop when at least half of the capacity is
  // unused, leaving roughly 25% of size() as free headroom. Never shrinks
  // below the initial capacity. A shrink reallocates and so invalidates
  // pointers into the buffer (callers bump the generation).
  void ShrinkCapacityIfNecessary() {
    // Don't auto-shrink below this size.
    if (capacity() <= internal::kCircularBufferInitialCapacity)
      return;

    // Shrink when 100% of the size() is wasted.
    size_t sz = size();
    size_t empty_spaces = capacity() - sz;
    if (empty_spaces < sz)
      return;

    // Leave 1/4 the size as free capacity, not going below the initial
    // capacity.
    size_t new_capacity =
        std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
    if (new_capacity < capacity()) {
      // Count extra item to convert to internal capacity.
      SetCapacityTo(new_capacity);
    }
  }
982 
  // Backend for clear() but does not resize the internal buffer. All elements
  // are destroyed; the allocated capacity is kept for reuse.
  void ClearRetainCapacity() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    DestructRange(begin_, end_);
    begin_ = 0;
    end_ = 0;
    IncrementGeneration();
  }
992 
993   // Calls destructors for the given begin->end indices. The indices may wrap
994   // around. The buffer is not resized, and the begin_ and end_ members are
995   // not changed.
DestructRange(size_t begin,size_t end)996   void DestructRange(size_t begin, size_t end) {
997     if (end == begin) {
998       return;
999     } else if (end > begin) {
1000       buffer_.DestructRange(&buffer_[begin], &buffer_[end]);
1001     } else {
1002       buffer_.DestructRange(&buffer_[begin], &buffer_[buffer_.capacity()]);
1003       buffer_.DestructRange(&buffer_[0], &buffer_[end]);
1004     }
1005   }
1006 
  // Makes room for |count| items starting at |*insert_begin|. Since iterators
  // are not stable across buffer resizes, |*insert_begin| will be updated to
  // point to the beginning of the newly opened position in the new array (it's
  // in/out), and the end of the newly opened position (it's out-only).
  void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
    if (count == 0) {
      // Nothing to open up; the "gap" is empty.
      *insert_end = *insert_begin;
      return;
    }

    // The offset from the beginning will be stable across reallocations.
    size_t begin_offset = insert_begin->OffsetFromBegin();
    ExpandCapacityIfNecessary(count);

    // Recompute the gap's buffer indices in the (possibly reallocated)
    // buffer. All index math is modulo the internal capacity since the range
    // may wrap.
    insert_begin->index_ = (begin_ + begin_offset) % buffer_.capacity();
    *insert_end =
        iterator(this, (insert_begin->index_ + count) % buffer_.capacity());

    // Update the new end and prepare the iterators for copying.
    iterator src = end();
    end_ = (end_ + count) % buffer_.capacity();
    iterator dest = end();

    // Move the elements. This will always involve shifting logically to the
    // right, so move in a right-to-left order to avoid clobbering items that
    // have not been moved yet.
    while (true) {
      if (src == *insert_begin)
        break;
      --src;
      --dest;
      buffer_.MoveRange(&buffer_[src.index_], &buffer_[src.index_ + 1],
                        &buffer_[dest.index_]);
    }
  }
1041 
1042 #if DCHECK_IS_ON()
1043   // Asserts the given index is dereferencable. The index is an index into the
1044   // buffer, not an index used by operator[] or at() which will be offsets from
1045   // begin.
CheckValidIndex(size_t i)1046   void CheckValidIndex(size_t i) const {
1047     if (begin_ <= end_)
1048       DCHECK(i >= begin_ && i < end_);
1049     else
1050       DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
1051   }
1052 
  // Asserts the given index is either dereferencable or points to end().
  // end_ is the one-past-the-last slot and is explicitly permitted here.
  void CheckValidIndexOrEnd(size_t i) const {
    if (i != end_)
      CheckValidIndex(i);
  }
1058 
  // DCHECKs that |i| was created from this deque and has not been
  // invalidated by a mutation since (generation check in the iterator).
  void ValidateIterator(const const_iterator& i) const {
    DCHECK(i.parent_deque_ == this);
    i.CheckUnstableUsage();
  }
1063 
  // Bumps generation_ so iterators created before this mutation will DCHECK
  // when subsequently used. See generation_ below.
  void IncrementGeneration() { generation_++; }
1066 #else
1067   // No-op versions of these functions for release builds.
CheckValidIndex(size_t)1068   void CheckValidIndex(size_t) const {}
CheckValidIndexOrEnd(size_t)1069   void CheckValidIndexOrEnd(size_t) const {}
ValidateIterator(const const_iterator & i)1070   void ValidateIterator(const const_iterator& i) const {}
IncrementGeneration()1071   void IncrementGeneration() {}
1072 #endif
1073 
1074   // Danger, the buffer_.capacity() is the "internal capacity" which is
1075   // capacity() + 1 since there is an extra item to indicate the end. Otherwise
1076   // being completely empty and completely full are indistinguishable (begin ==
1077   // end). We could add a separate flag to avoid it, but that adds significant
1078   // extra complexity since every computation will have to check for it. Always
1079   // keeping one extra unused element in the buffer makes iterator computations
1080   // much simpler.
1081   //
1082   // Container internal code will want to use buffer_.capacity() for offset
1083   // computations rather than capacity().
1084   VectorBuffer buffer_;
1085   size_type begin_ = 0;
1086   size_type end_ = 0;
1087 
1088 #if DCHECK_IS_ON()
1089   // Incremented every time a modification is made that could affect iterator
1090   // invalidations.
1091   uint64_t generation_ = 0;
1092 #endif
1093 };
1094 
1095 // Implementations of base::Erase[If] (see base/stl_util.h).
1096 template <class T, class Value>
Erase(circular_deque<T> & container,const Value & value)1097 size_t Erase(circular_deque<T>& container, const Value& value) {
1098   auto it = ranges::remove(container, value);
1099   size_t removed = std::distance(it, container.end());
1100   container.erase(it, container.end());
1101   return removed;
1102 }
1103 
1104 template <class T, class Predicate>
EraseIf(circular_deque<T> & container,Predicate pred)1105 size_t EraseIf(circular_deque<T>& container, Predicate pred) {
1106   auto it = ranges::remove_if(container, pred);
1107   size_t removed = std::distance(it, container.end());
1108   container.erase(it, container.end());
1109   return removed;
1110 }
1111 
1112 }  // namespace base
1113 
1114 #endif  // BASE_CONTAINERS_CIRCULAR_DEQUE_H_
1115