The Sparta Modeling Framework
Buffer.hpp
1// <Buffer.h> -*- C++ -*-
2
3
10#pragma once
11
12#include <cinttypes>
13#include <vector>
14#include <algorithm>
15#include <type_traits>
16
22#include "sparta/collection/IterableCollector.hpp"
23#include "sparta/statistics/Counter.hpp"
25
26namespace sparta
27{
28
72 template <class DataT>
73 class Buffer
74 {
75 public:
76
77 // A typedef for this Buffer's type. This is useful for my
78 // subclasses, BufferIterator & EntryValidator
79 typedef Buffer<DataT> BufferType;
80
81 // Typedef for the DataT
82 typedef DataT value_type;
83
84 // Typedef for size_type
85 typedef uint32_t size_type;
86
87 private:
88
96 struct DataPointer {
97 private:
98 alignas(value_type) std::byte object_memory_[sizeof(value_type)];
99
100 public:
101 DataPointer() { }
102
103 DataPointer(DataPointer &&orig) {
104 ::memcpy(&object_memory_, &orig.object_memory_, sizeof(object_memory_));
105 data = reinterpret_cast<value_type*>(&object_memory_);
106 }
107
108 // No copies, only moves
109 DataPointer(const DataPointer &) = delete;
110
111 template<typename U>
112 void allocate(U && dat) {
113 data = new (&object_memory_) value_type(std::forward<U>(dat));
114 }
115
116 value_type * data = nullptr;
117 DataPointer* next_free = nullptr;
118 uint32_t physical_idx = 0;
119 };
120 //Forward Declaration
121 struct DataPointerValidator;
122
137 template <bool is_const_iterator = true>
138 class BufferIterator : public utils::IteratorTraits<std::bidirectional_iterator_tag, value_type>
139 {
140 private:
141 friend class Buffer<value_type>;
142 typedef typename std::conditional<is_const_iterator,
143 const value_type &, value_type &>::type DataReferenceType;
144 typedef typename std::conditional<is_const_iterator,
145 const BufferType *, BufferType *>::type BufferPointerType;
146 typedef typename std::conditional<is_const_iterator,
147 const DataPointer *, DataPointer *>::type DataPointerType;
154 uint32_t getIndex_() const {
155 if(buffer_entry_ == nullptr) {
156 return attached_buffer_->capacity();
157 }
158 return buffer_entry_->physical_idx;
159 }
160
162 BufferPointerType attached_buffer_ = nullptr;
163
165 DataPointerType buffer_entry_ = nullptr;
166
172 BufferIterator(BufferPointerType buffer, DataPointerType entry) :
173 attached_buffer_(buffer),
174 buffer_entry_(entry)
175 {}
176
177
178 public:
179
183 BufferIterator() = default;
184
189 BufferIterator(const BufferIterator<false> & iter) :
190 attached_buffer_(iter.attached_buffer_),
191 buffer_entry_(iter.buffer_entry_)
192 {}
193
198 BufferIterator(const BufferIterator<true> & iter) :
199 attached_buffer_(iter.attached_buffer_),
200 buffer_entry_(iter.buffer_entry_)
201 {}
202
206 BufferIterator& operator=(const BufferIterator&) = default;
207
209 bool operator<(const BufferIterator& rhs) const
210 {
211 sparta_assert(attached_buffer_ == rhs.attached_buffer_,
212 "Cannot compare BufferIterators created by different buffers.");
213 return getIndex_() < rhs.getIndex_();
214 }
215
217 bool operator>(const BufferIterator& rhs) const
218 {
219 sparta_assert(attached_buffer_ == rhs.attached_buffer_,
220 "Cannot compare BufferIterators created by different buffers.");
221 return getIndex_() > rhs.getIndex_();
222 }
223
225 bool operator==(const BufferIterator& rhs) const
226 {
227 sparta_assert(attached_buffer_ == rhs.attached_buffer_,
228 "Cannot compare BufferIterators created by different buffers.");
229 return (buffer_entry_ == rhs.buffer_entry_);
230 }
231
233 bool operator!=(const BufferIterator& rhs) const
234 {
235 sparta_assert(attached_buffer_ == rhs.attached_buffer_,
236 "Cannot compare BufferIterators created by different buffers.");
237 return !operator==(rhs);
238 }
239
242 bool isValid() const
243 {
244 if(buffer_entry_ != nullptr) {
245 return attached_buffer_->validator_->isValid(buffer_entry_);
246 }
247 return false;
248 }
249
251 DataReferenceType operator* () const {
252 sparta_assert(attached_buffer_,
253 "The iterator is not attached to a buffer. Was it initialized?");
254 sparta_assert(isValid(), "Iterator is not valid for dereferencing");
255 return *(buffer_entry_->data);
256 }
257
259 value_type * operator -> () {
260 sparta_assert(attached_buffer_,
261 "The iterator is not attached to a buffer. Was it initialized?");
262 sparta_assert(isValid(), "Iterator is not valid for dereferencing");
263 return buffer_entry_->data;
264 }
265
266 value_type const * operator -> () const {
267 sparta_assert(attached_buffer_,
268 "The iterator is not attached to a buffer. Was it initialized?");
269 sparta_assert(isValid(), "Iterator is not valid for dereferencing");
270 return buffer_entry_->data;
271 }
272
275 BufferIterator & operator++() {
276 sparta_assert(attached_buffer_,
277 "The iterator is not attached to a buffer. Was it initialized?");
278 sparta_assert(isValid(), "Incrementing an iterator that is not valid");
279 const uint32_t idx = buffer_entry_->physical_idx + 1;
280 if(attached_buffer_->isValid(idx)) {
281 buffer_entry_ = attached_buffer_->buffer_map_[idx];
282 }
283 else {
284 buffer_entry_ = nullptr;
285 }
286 return *this;
287 }
288
290 BufferIterator operator++ (int) {
291 BufferIterator buf_iter(*this);
292 operator++();
293 return buf_iter;
294 }
295
297 BufferIterator & operator-- ()
298 {
299 sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
300 if(isValid()) {
301 uint32_t idx = buffer_entry_->physical_idx;
302 --idx;
303 if(attached_buffer_->isValid(idx)) {
304 buffer_entry_ = attached_buffer_->buffer_map_[idx];
305 }
306 else {
307 sparta_assert(idx < attached_buffer_->capacity(), "Decrementing the iterator results in buffer underrun");
308 buffer_entry_ = nullptr;
309 }
310 }
311 else if (attached_buffer_->size()) {
312 buffer_entry_ = attached_buffer_->buffer_map_[attached_buffer_->size()-1];
313 }
314 return *this;
315 }
316
318 BufferIterator operator-- (int) {
319 BufferIterator buf_iter(*this);
320 operator--();
321 return buf_iter;
322 }
323
328 friend class BufferIterator<true>;
329 };
330
331 public:
332
334 typedef BufferIterator<false> iterator;
335
337 typedef BufferIterator<true> const_iterator;
338
340 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
341
343 typedef std::reverse_iterator<iterator> reverse_iterator;
344
382 Buffer(const std::string & name,
383 const uint32_t num_entries,
384 const Clock * clk,
385 StatisticSet * statset = nullptr,
386 InstrumentationNode::visibility_t stat_vis_general = InstrumentationNode::AUTO_VISIBILITY,
387 InstrumentationNode::visibility_t stat_vis_detailed = InstrumentationNode::VIS_HIDDEN,
388 InstrumentationNode::visibility_t stat_vis_max = InstrumentationNode::AUTO_VISIBILITY,
389 InstrumentationNode::visibility_t stat_vis_avg = InstrumentationNode::AUTO_VISIBILITY);
390
392 Buffer(const Buffer<value_type> & ) = delete;
393
394 // No copies allowed for Buffer
395 Buffer & operator=(const Buffer<value_type> &) = delete;
396
397 // Move Constructor to allow moves
398 Buffer(Buffer<value_type> &&);
399
401 ~Buffer() { clear(); }
402
406 const std::string & getName() const {
407 return name_;
408 }
409
416 bool isValid(uint32_t idx) const {
417 return idx < size();
418 }
419
425 const value_type & read(uint32_t idx) const {
427 return *(buffer_map_[idx]->data);
428 }
429
434 const value_type & read(const const_iterator & entry) const
435 {
436 return read(entry.getIndex_());
437 }
438
443 const value_type & read(const const_reverse_iterator & entry) const
444 {
445 return read(std::prev(entry.base()));
446 }
447
454 value_type & access(uint32_t idx) {
456 return *(buffer_map_[idx]->data);
457 }
458
463 value_type & access(const const_iterator & entry) {
464 return access(entry.getIndex_());
465 }
466
471 value_type & access(const const_reverse_iterator & entry) {
472 return access(std::prev(entry.base()));
473 }
474
480 value_type & accessBack() {
481 sparta_assert(isValid(num_valid_ - 1));
482 return *(buffer_map_[num_valid_ - 1]->data);
483 }
484
489 size_type capacity() const {
490 return num_entries_;
491 }
492
497 size_type size() const {
498 return num_valid_;
499 }
500
507 size_type numFree() const {
508 return capacity() - size();
509 }
510
520 iterator push_back(const value_type& dat)
521 {
522 return push_backImpl_(dat);
523 }
524
534 iterator push_back(value_type&& dat)
535 {
536 return push_backImpl_(std::move(dat));
537 }
538
557 iterator insert(uint32_t idx, const value_type& dat)
558 {
559 return insertImpl_(idx, dat);
560 }
561
580 iterator insert(uint32_t idx, value_type&& dat)
581 {
582 return insertImpl_(idx, std::move(dat));
583 }
584
586 iterator insert(const const_iterator & entry, const value_type& dat)
587 {
588 return insert(entry.getIndex_(), dat);
589 }
590
592 iterator insert(const const_iterator & entry, value_type&& dat)
593 {
594 return insert(entry.getIndex_(), std::move(dat));
595 }
596
598 iterator insert(const const_reverse_iterator & entry, const value_type& dat)
599 {
600 return insert(entry.base().getIndex_(), dat);
601 }
602
604 iterator insert(const const_reverse_iterator & entry, value_type&& dat)
605 {
606 return insert(entry.base().getIndex_(), std::move(dat));
607 }
608
623 void erase(uint32_t idx)
624 {
625 // Make sure we are invalidating an already valid object.
626 sparta_assert(idx < size(),
627 "Cannot erase an index that is not already valid");
628
629 // Do the invalidation immediately
630 // 1. Move the free space pointer to the erased position.
631 // 2. Call the DataT's destructor
632 // 3. Set the current free space pointer's next to the old free position
633 DataPointer* oldFree = free_position_;
634 free_position_ = buffer_map_[idx];
635 free_position_->data->~value_type();
636 free_position_->next_free = oldFree;
637
638 // Mark DataPointer as invalid
639 validator_->detachDataPointer(free_position_);
640
641 // Shift all the positions above the invalidation in the map one space down.
642 sparta_assert(num_valid_ > 0);
643 const uint32_t top_idx_of_buffer = num_valid_ - 1;
644 while(idx < top_idx_of_buffer)
645 {
646 // assert that we are not going to do an invalid read.
647 sparta_assert(idx + 1 < num_entries_);
648 buffer_map_[idx] = buffer_map_[idx + 1];
649 buffer_map_[idx]->physical_idx = idx;
650
651 // Shift the indexes in the address map.
652 address_map_[idx] = address_map_[idx + 1];
653 ++idx;
654 }
655
656 // The entry at the old top index (num_valid_ - 1) in the map now points to nullptr
657 buffer_map_[top_idx_of_buffer] = nullptr;
658
659 // Remove this entry of the address map as it becomes a free position.
660 address_map_.erase(top_idx_of_buffer);
661
662 // update counts.
663 --num_valid_;
664 updateUtilizationCounters_();
665 }
666
671 iterator erase(const const_iterator & entry)
672 {
673 sparta_assert(entry.attached_buffer_ == this,
674 "Cannot erase an entry created by another Buffer");
675 // erase the index in the actual buffer.
676 erase(entry.getIndex_());
677 return {this, buffer_map_[entry.getIndex_()]};
678 }
679
684 reverse_iterator erase(const const_reverse_iterator & entry)
685 {
686 return reverse_iterator{erase(std::prev(entry.base()))};
687 }
688
693 void clear()
694 {
695 num_valid_ = 0;
696 std::for_each(buffer_map_.begin(), buffer_map_.end(),
697 [] (auto map_entry)
698 {
699 if(map_entry) {
700 map_entry->data->~value_type();
701 }
702 });
703 std::fill(buffer_map_.begin(), buffer_map_.end(), nullptr);
704 for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
705 data_pool_[i].next_free = &data_pool_[i + 1];
706 }
707 data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];
708 free_position_ = &data_pool_[0];
709 first_position_ = &data_pool_[0];
710 validator_->clear();
711 address_map_.clear();
712 updateUtilizationCounters_();
713 }
714
719 bool empty() const
720 {
721 return num_valid_ == 0;
722 }
723
733 void enableCollection(TreeNode * parent) {
734 collector_.
735 reset(new collection::IterableCollector<Buffer<DataT> >(parent, getName(),
736 this, capacity()));
737 }
738
743 iterator begin() {
744 if(size()) {
745 sparta_assert(buffer_map_[0]);
746 return iterator(this, buffer_map_[0]);
747 }
748 return end();
749 }
750
755 iterator end() { return iterator(this, nullptr);}
756
761 const_iterator begin() const {
762 if(size()) {
763 return const_iterator(this, buffer_map_[0]);
764 }
765 return end();
766 }
767
772 const_iterator end() const { return const_iterator(this, nullptr);}
773
778 reverse_iterator rbegin() {
779 return reverse_iterator(end());
780 }
781
786 reverse_iterator rend() { return reverse_iterator(begin());}
787
792 const_reverse_iterator rbegin() const {
793 return const_reverse_iterator(end());
794 }
795
800 const_reverse_iterator rend() const { return const_reverse_iterator(begin());}
801
808 void makeInfinite(const uint32_t resize_delta = 1) {
809 is_infinite_mode_ = true;
810 resize_delta_ = resize_delta;
811 }
812
813 private:
814
815 typedef std::vector<DataPointer> DataPool;
816 typedef std::vector<DataPointer*> PointerList;
817
818 struct DataPointerValidator
819 {
821 // data_pool_ of the Buffer class.
822 const DataPool * data_pool_;
823 std::vector<uint32_t> validator_;
824 size_type getIndex_(const DataPointer * dp)const {
825 auto i = (dp - &(*data_pool_)[0]);
826 return static_cast<size_type>(i);
827 }
828
829 DataPointerValidator(const Buffer &b):
830 data_pool_(&b.data_pool_),
831 validator_(b.num_entries_, 0)
832 {}
833
834 void attachDataPointer(const DataPointer* dp){
835 validator_[getIndex_(dp)] = 1;
836 }
837
838 bool isValid(const DataPointer * dp) const {
839 return bool(validator_[getIndex_(dp)]);
840 }
841
842 void detachDataPointer(DataPointer * dp) {
843 validator_[getIndex_(dp)] = 0;
844 }
845
846 void clear() {
847 std::fill(validator_.begin(), validator_.end(), 0);
848 }
849
855 void resizeIteratorValidator(const uint32_t resize_delta,
856 const DataPool & data_pool) {
857 validator_.resize(validator_.capacity() + resize_delta);
858 data_pool_ = &data_pool;
859 }
860 };
861
862 void updateUtilizationCounters_() {
863 // Update Counters
864 if(utilization_) {
865 utilization_->setValue(num_valid_);
866 }
867 }
868
875 void resizeInternalContainers_() {
876
877 // Assert that the Buffer class is in Infinite-Mode.
878 sparta_assert(is_infinite_mode_,
879 "The Buffer class must be in Infinite-Mode in order to resize itself.");
880
881 // We do not resize if there are available slots in buffer.
882 if(numFree() != 0) {
883 return;
884 }
885
886 // Resize the buffer_map_ with the amount provided by user.
887 buffer_map_.resize(buffer_map_.capacity() + resize_delta_);
888
889 // The number of entries the buffer can hold is its capacity.
890 num_entries_ = buffer_map_.capacity();
891
892 // Resize the data_pool_ to twice the capacity of the buffer_map_.
893 data_pool_.resize(num_entries_ * 2);
894
895 // The number of entries the pool can hold is its capacity.
896 data_pool_size_ = data_pool_.capacity();
897
898
899 // Each entry in data_pool_ should have their next free position
900 // pointer point to the slot to its right.
901 for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
902 data_pool_[i].next_free = &data_pool_[i + 1];
903 }
904
905 // The last entry should point to itself as the next free position.
906 data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];
907
908 // The first position should point to the first entry.
909 first_position_ = &data_pool_[0];
910
911 // The free position should point to the location according to
912 // the number of entries in the buffer.
913 free_position_ = &data_pool_[num_valid_];
914
915 // Make all the pointers in buffer_map_ point to the appropriate indexes.
916 for(uint32_t i = 0; i < num_valid_; ++i) {
917 buffer_map_[i] = &data_pool_[address_map_[i]];
918 }
919
920 // Resize the validator vector and relink the validator data pool.
921 validator_->resizeIteratorValidator(resize_delta_, data_pool_);
922 }
923
924 template<typename U>
925 iterator push_backImpl_(U&& dat)
926 {
927
928 // Check to see if the vectors need to be resized and relinked.
929 if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
930 resizeInternalContainers_();
931 }
932 sparta_assert(numFree(), "Buffer exhausted");
933 sparta_assert(free_position_ != nullptr);
934 free_position_->allocate(std::forward<U>(dat));
935 free_position_->physical_idx = num_valid_;
936
937 // Create the entry to be returned.
938 iterator entry(this, free_position_);
939
940 // Do the append now. We can do this with different logic
941 // that does not require a process.
942 buffer_map_[num_valid_] = free_position_;
943
944 // Store the index in the data_pool_ to which current
945 // free_position_ points to. We need to relink all these
946 // pointers once the data_pool_ resizes.
947 address_map_[num_valid_] =
948 static_cast<uint32_t>(free_position_ - &data_pool_[0]);
949 //Mark this data pointer as valid
950 validator_->attachDataPointer(free_position_);
951 ++num_valid_;
952 free_position_ = free_position_->next_free;
953 updateUtilizationCounters_();
954
955 return entry;
956 }
957
958 template<typename U>
959 iterator insertImpl_(uint32_t idx, U&& dat)
960 {
961 // Check to see if the vectors need to be resized and relinked.
962 if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
963 resizeInternalContainers_();
964 }
965 sparta_assert(numFree(), "Buffer '" << getName() << "' exhausted");
966 sparta_assert(idx <= num_valid_, "Buffer '" << getName()
967 << "': Cannot insert before a non valid index");
968 sparta_assert(free_position_ != nullptr);
969 free_position_->allocate(std::forward<U>(dat));
970 free_position_->physical_idx = idx;
971
972 //Mark this data pointer as valid
973 validator_->attachDataPointer(free_position_);
974
975 // Create the entry to be returned.
976 iterator entry(this, free_position_);
977
978 // Shift all the positions at and above idx in the map one space up.
979 uint32_t i = num_valid_;
980 while(i > idx)
981 {
982 //assert that we are not going to do an invalid read.
983 buffer_map_[i] = buffer_map_[i - 1];
984 buffer_map_[i]->physical_idx = i ;
985
986 // Shift the indexes in the map.
987 address_map_[i] = address_map_[i - 1];
988 --i;
989 }
990
991 buffer_map_[idx] = free_position_;
992
993 // Store the index in the data_pool_ to which current
994 // free_position_ points to. We need to relink all these
995 // pointers once the data_pool_ resizes.
996 address_map_[idx] =
997 static_cast<uint32_t>(free_position_ - &data_pool_[0]);
998 ++num_valid_;
999 free_position_ = free_position_->next_free;
1000 updateUtilizationCounters_();
1001 return entry;
1002 }
1003
1004 std::string name_;
1005 const Clock * clk_ = nullptr;
1006 size_type num_entries_ = 0;
1007 PointerList buffer_map_;
1008 size_type data_pool_size_ = 0;
1009 DataPool data_pool_;
1012 DataPointer* free_position_ = nullptr;
1013 DataPointer* first_position_ = nullptr;
1014 size_type num_valid_ = 0;
1015 std::unique_ptr<DataPointerValidator> validator_;
1018 // Counters
1019 std::unique_ptr<sparta::CycleHistogramStandalone> utilization_;
1020
1022 // Collectors
1023 std::unique_ptr<collection::IterableCollector<Buffer<value_type> > > collector_;
1024
1026 // The behaviour of these methods change accordingly.
1027 bool is_infinite_mode_ {false};
1028
1030 // The additional amount of entries the vector must allocate when resizing.
1031 uint32_t resize_delta_ = 1;
1032
1034 std::unordered_map<uint32_t, uint32_t> address_map_;
1035 };
1036
1038 // Definitions...
1039 template<class DataT>
1040 Buffer<DataT>::Buffer(const std::string & name,
1041 uint32_t num_entries,
1042 const Clock * clk,
1043 StatisticSet * statset,
1044 InstrumentationNode::visibility_t stat_vis_general,
1045 InstrumentationNode::visibility_t stat_vis_detailed,
1046 InstrumentationNode::visibility_t stat_vis_max,
1047 InstrumentationNode::visibility_t stat_vis_avg) :
1048 name_(name),
1049 clk_(clk),
1050 num_entries_(num_entries),
1051 data_pool_size_(num_entries* 2)
1052 {
1053 if((num_entries > 0) && statset)
1054 {
1055 utilization_.reset(new CycleHistogramStandalone(statset, clk_,
1056 name_ + "_utilization",
1057 name_ + " occupancy histogram",
1058 0, num_entries, 1, 0,
1059 stat_vis_general,
1060 stat_vis_detailed,
1061 stat_vis_max,
1062 stat_vis_avg));
1063 }
1064
1065 buffer_map_.resize(num_entries_);
1066 data_pool_.resize(data_pool_size_);
1067
1068 // Must set the validator before you clear
1069 validator_.reset(new DataPointerValidator(*this));
1070 clear();
1071 }
1072
1074 template<typename DataT>
1075 Buffer<DataT>::Buffer(Buffer<DataT> && rval) :
1076 name_(std::move(rval.name_)),
1077 clk_(rval.clk_),
1078 num_entries_(rval.num_entries_),
1079 buffer_map_(std::move(rval.buffer_map_)),
1080 data_pool_size_(rval.data_pool_size_),
1081 data_pool_(std::move(rval.data_pool_)),
1082 free_position_(rval.free_position_),
1083 first_position_(rval.first_position_),
1084 num_valid_(rval.num_valid_),
1085 validator_(new DataPointerValidator(*this)),
1086 utilization_(std::move(rval.utilization_)),
1087 collector_(std::move(rval.collector_)),
1088 is_infinite_mode_(rval.is_infinite_mode_),
1089 resize_delta_(std::move(rval.resize_delta_)),
1090 address_map_(std::move(rval.address_map_)){
1091 rval.clk_ = nullptr;
1092 rval.num_entries_ = 0;
1093 rval.data_pool_size_ = 0;
1094 rval.free_position_ = nullptr;
1095 rval.first_position_ = nullptr;
1096 rval.num_valid_ = 0;
1097 rval.utilization_ = nullptr;
1098 rval.collector_ = nullptr;
1099 validator_->validator_ = std::move(rval.validator_->validator_);
1100 if(collector_) {
1101 collector_->reattach(this);
1102 }
1103 }
1104}
CycleHistogram implementation using sparta CycleCounter.
Defines a few handy (and now deprecated) C++ iterator traits.
Contains a collection implementation of various compile-time metaprogramming and Type-Detection APIs ...
Set of macros for Sparta assertions. Caught by the framework.
#define sparta_assert(...)
Simple variadic assertion that will throw a sparta_exception if the condition fails.
#define SPARTA_EXPECT_FALSE(x)
A macro for hinting to the compiler a particular condition should be considered most likely false.
Contains a statistic definition (some useful information which can be computed)
Contains a StatisticInstance which refers to a StatisticDef or Counter and some local state to comput...
A data structure allowing appending at the end, beginning, or middle, but erase anywhere with collaps...
Definition Buffer.hpp:74
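A minimal usage sketch (illustrative only, not part of Buffer.hpp; it assumes the Buffer header is included and that a sparta::Clock pointer is available from the enclosing simulation, and it relies on the default nullptr StatisticSet):

    void buffer_example(const sparta::Clock * clk)          // hypothetical helper
    {
        sparta::Buffer<uint64_t> buf("my_buffer", 4, clk);  // capacity of 4 entries
        auto it = buf.push_back(10);       // append; returns an iterator to the new entry
        buf.push_back(20);
        buf.insert(0, 5);                  // insert BEFORE index 0
        const uint64_t front = buf.read(0);      // 5
        buf.erase(it);                     // erase the entry holding 10; later entries collapse down
        const auto free_slots = buf.numFree();   // capacity() - size()
        (void)front; (void)free_slots;
    }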
iterator insert(uint32_t idx, const value_type &dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:557
void clear()
Empty the contents of the Buffer.
Definition Buffer.hpp:693
size_type numFree() const
Return the number of free entries.
Definition Buffer.hpp:507
const std::string & getName() const
Name of this resource.
Definition Buffer.hpp:406
Buffer(Buffer< value_type > &&)
Move Constructor to allow moves.
Definition Buffer.hpp:1075
const value_type & read(const const_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:434
iterator insert(const const_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:592
std::reverse_iterator< iterator > reverse_iterator
Typedef for regular reverse iterator.
Definition Buffer.hpp:343
const_reverse_iterator rend() const
Returns a const_reverse_iterator referring to the element preceding the first element (the reverse end) in the Buffer container.
Definition Buffer.hpp:800
Buffer(const std::string &name, const uint32_t num_entries, const Clock *clk, StatisticSet *statset=nullptr, InstrumentationNode::visibility_t stat_vis_general=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_detailed=InstrumentationNode::VIS_HIDDEN, InstrumentationNode::visibility_t stat_vis_max=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_avg=InstrumentationNode::AUTO_VISIBILITY)
Construct a buffer.
Definition Buffer.hpp:1040
void erase(uint32_t idx)
erase a position in the Buffer immediately.
Definition Buffer.hpp:623
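A small sketch of the collapse behaviour of erase(idx) (illustrative; clk assumed available as in the earlier sketch):

    sparta::Buffer<int> buf("erase_demo", 3, clk);
    buf.push_back(1);    // index 0
    buf.push_back(2);    // index 1
    buf.push_back(3);    // index 2
    buf.erase(1);        // destroys the value 2
    // Entries above the erased slot shift down: read(0) == 1, read(1) == 3, size() == 2.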
size_type size() const
Return the number of valid entries.
Definition Buffer.hpp:497
bool empty() const
Query if the buffer is empty.
Definition Buffer.hpp:719
const_iterator begin() const
Get the const_iterator pointing to the beginning of the Buffer.
Definition Buffer.hpp:761
iterator push_back(const value_type &dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:520
reverse_iterator rend()
Returns a reverse iterator referring to the element preceding the first element (the reverse end) in the Buffer container.
Definition Buffer.hpp:786
BufferIterator< true > const_iterator
Typedef for constant iterator.
Definition Buffer.hpp:337
void makeInfinite(const uint32_t resize_delta=1)
Makes the Buffer grow beyond its capacity. The buffer grows by adding new entries in its internal vec...
Definition Buffer.hpp:808
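A sketch of growing past the constructed capacity (illustrative; clk assumed as above). Once makeInfinite has been called, any push_back or insert that finds no free entry grows the internal vectors by resize_delta entries:

    sparta::Buffer<int> buf("grow_demo", 2, clk);
    buf.makeInfinite(4);     // grow by 4 entries whenever the buffer is full
    buf.push_back(1);
    buf.push_back(2);        // buffer now full (capacity 2)
    buf.push_back(3);        // triggers an internal resize instead of asserting
    // buf.capacity() is now at least 6; buf.size() == 3.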
iterator begin()
Get the iterator pointing to the beginning of Buffer.
Definition Buffer.hpp:743
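Forward and reverse traversal with these iterators (illustrative; clk assumed as above):

    sparta::Buffer<int> buf("iter_demo", 4, clk);
    buf.push_back(1);
    buf.push_back(2);
    int forward_sum = 0;
    for (auto it = buf.begin(); it != buf.end(); ++it) {
        forward_sum += *it;          // visits 1, then 2
    }
    int reverse_sum = 0;
    for (auto rit = buf.rbegin(); rit != buf.rend(); ++rit) {
        reverse_sum += *rit;         // visits 2, then 1
    }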
value_type & access(const const_reverse_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:471
iterator push_back(value_type &&dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:534
const_iterator end() const
Returns a const_iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:772
Buffer & operator=(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
const value_type & read(uint32_t idx) const
Read and return the data at the given index, const reference.
Definition Buffer.hpp:425
iterator insert(const const_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:586
const value_type & read(const const_reverse_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:443
reverse_iterator rbegin()
Get the reverse iterator pointing to the last element of the Buffer.
Definition Buffer.hpp:778
iterator insert(const const_reverse_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:604
iterator erase(const const_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:671
value_type & access(const const_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:463
value_type & accessBack()
Read and return the data at the back (last valid entry) of the Buffer.
Definition Buffer.hpp:480
std::reverse_iterator< const_iterator > const_reverse_iterator
Typedef for constant reverse iterator.
Definition Buffer.hpp:340
bool isValid(uint32_t idx) const
Determine if data at the index is valid.
Definition Buffer.hpp:416
iterator insert(uint32_t idx, value_type &&dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:580
size_type capacity() const
Return the fixed size of this buffer.
Definition Buffer.hpp:489
void enableCollection(TreeNode *parent)
Request that this buffer begin collecting its contents for pipeline collection.
Definition Buffer.hpp:733
reverse_iterator erase(const const_reverse_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:684
Buffer(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
iterator insert(const const_reverse_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:598
value_type & access(uint32_t idx)
Read and return the data at the given index, reference.
Definition Buffer.hpp:454
BufferIterator< false > iterator
Typedef for regular iterator.
Definition Buffer.hpp:334
const_reverse_iterator rbegin() const
Get the const_reverse_iterator pointing to the last element of the Buffer.
Definition Buffer.hpp:792
iterator end()
Returns an iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:755
~Buffer()
Clear (and destruct) the Buffer's contents.
Definition Buffer.hpp:401
A representation of simulated time.
Definition Clock.hpp:51
CycleHistogramStandalone class for uint64_t values.
uint32_t visibility_t
Continuous visibility level. Several key points along the continuum are indicated within Visibility.
@ VIS_HIDDEN
Hidden hint. Lowest possible visibility.
static constexpr visibility_t AUTO_VISIBILITY
The default sparta resource visibility value that should be used. This is an alias of VIS_MAX at the ...
Set of StatisticDef and CounterBase-derived objects for visibility through a sparta Tree.
Node in a composite tree representing a sparta Tree item.
Definition TreeNode.hpp:205
A collector of any iterable type (std::vector, std::list, sparta::Buffer, etc)
Provides a wrapper around a value to ensure that the value is assigned.
Macros for handling exponential backoff.