The Sparta Modeling Framework
Loading...
Searching...
No Matches
Buffer.hpp
Go to the documentation of this file.
1// <Buffer.h> -*- C++ -*-
2
3
10#pragma once
11
#include <cinttypes>
#include <cstddef>
#include <cstring>
#include <algorithm>
#include <memory>
#include <string>
#include <type_traits>
#include <unordered_map>
#include <vector>

#include "sparta/collection/IterableCollector.hpp"
#include "sparta/statistics/Counter.hpp"
25
26namespace sparta
27{
28
72 template <class DataT>
73 class Buffer
74 {
75 public:
76
77 // A typedef for this Buffer's type. This is useful for my
78 // subclasses, BufferIterator & EntryValidator
80
81 // Typedef for the DataT
82 typedef DataT value_type;
83
84 // Typedef for size_type
85 typedef uint32_t size_type;
86
87 private:
88
        /*!
         * \brief Internal storage slot for a single value_type object.
         *
         * Each slot owns raw, correctly-aligned bytes and placement-constructs
         * the user's object into them on demand.  Slots are chained into an
         * intrusive free list via \c next_free.
         */
        struct DataPointer {
        private:
            // Raw storage for one value_type; object lifetime is managed
            // manually via allocate() and an explicit destructor call in
            // Buffer::erase()/clear().
            alignas(value_type) std::byte object_memory_[sizeof(value_type)];

        public:
            DataPointer() { }

            // Move: byte-copies the stored object and re-points 'data' at our
            // own storage.  NOTE(review): the memcpy relocation is only
            // well-defined for trivially copyable value_types, and 'data' is
            // set even when the source slot held no live object -- confirm
            // both are intended (this path runs during data_pool_ resize).
            DataPointer(DataPointer &&orig) {
                ::memcpy(&object_memory_, &orig.object_memory_, sizeof(object_memory_));
                data = reinterpret_cast<value_type*>(&object_memory_);
            }

            // No copies, only moves
            DataPointer(const DataPointer &) = delete;

            // Placement-construct a value_type in this slot from dat
            // (copy or move depending on U).
            template<typename U>
            void allocate(U && dat) {
                data = new (&object_memory_) value_type(std::forward<U>(dat));
            }

            value_type * data = nullptr;       // live object, or nullptr when slot is empty
            DataPointer* next_free = nullptr;  // intrusive free-list link
            uint32_t physical_idx = 0;         // current logical index within buffer_map_
        };
120 //Forward Declaration
121 struct DataPointerValidator;
122
        /*!
         * \brief Bidirectional iterator over the contents of the Buffer.
         * \tparam is_const_iterator true produces const_iterator semantics,
         *         false produces the mutable iterator.
         *
         * An iterator stays attached to the Buffer that created it and
         * resolves its logical position through the slot's physical_idx, so
         * it tracks entries as they shift during insert/erase.  A null
         * buffer_entry_ represents end().
         */
        template <bool is_const_iterator = true>
        class BufferIterator : public utils::IteratorTraits<std::bidirectional_iterator_tag, value_type>
        {
        private:
            friend class Buffer<value_type>;
            typedef typename std::conditional<is_const_iterator,
                                              const value_type &, value_type &>::type DataReferenceType;
            typedef typename std::conditional<is_const_iterator,
                                              const BufferType *, BufferType *>::type BufferPointerType;
            typedef typename std::conditional<is_const_iterator,
                                              const DataPointer *, DataPointer *>::type DataPointerType;

            // Logical index of this iterator.  end() (null entry) reports the
            // buffer's capacity so it compares greater than any valid entry.
            uint32_t getIndex_() const {
                if(buffer_entry_ == nullptr) {
                    return attached_buffer_->capacity();
                }
                return buffer_entry_->physical_idx;
            }

            // Buffer that created this iterator; nullptr when default-constructed.
            BufferPointerType attached_buffer_ = nullptr;

            // Slot this iterator points at; nullptr means end().
            DataPointerType buffer_entry_ = nullptr;

            // Only the Buffer itself may create an attached iterator.
            BufferIterator(BufferPointerType buffer, DataPointerType entry) :
                attached_buffer_(buffer),
                buffer_entry_(entry)
            {}


        public:

            //! Default constructor: an unattached, invalid iterator.
            BufferIterator() = default;

            //! Construct from a mutable iterator (also serves as the
            //! iterator -> const_iterator conversion).
            BufferIterator(const BufferIterator<false> & iter) :
                attached_buffer_(iter.attached_buffer_),
                buffer_entry_(iter.buffer_entry_)
            {}

            //! Construct from a const iterator.
            BufferIterator(const BufferIterator<true> & iter) :
                attached_buffer_(iter.attached_buffer_),
                buffer_entry_(iter.buffer_entry_)
            {}

            //! Assignment
            BufferIterator& operator=(const BufferIterator&) = default;

            //! Order by logical index; both iterators must come from the same Buffer.
            bool operator<(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_, "Cannot compare BufferIterators created by different buffers.");
                return getIndex_() < rhs.getIndex_();
            }

            //! Order by logical index; both iterators must come from the same Buffer.
            bool operator>(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_, "Cannot compare BufferIterators created by different buffers.");
                return getIndex_() > rhs.getIndex_();
            }

            //! Equality is identity of the underlying slot.
            bool operator==(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_, "Cannot compare BufferIterators created by different buffers.");
                return (buffer_entry_ == rhs.buffer_entry_);
            }

            //! Inequality (negation of operator==).
            bool operator!=(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_, "Cannot compare BufferIterators created by different buffers.");
                return !operator==(rhs);
            }

            //! True when the slot this iterator points at still holds a live
            //! object according to the Buffer's validator.
            bool isValid() const
            {
                if(buffer_entry_ != nullptr) {
                    return attached_buffer_->validator_->isValid(buffer_entry_);
                }
                return false;
            }

            //! Dereference; asserts attachment and validity.
            DataReferenceType operator* () const {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return *(buffer_entry_->data);
            }

            //! Member access; asserts attachment and validity.
            value_type * operator -> () {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return buffer_entry_->data;
            }

            //! Const member access; asserts attachment and validity.
            value_type const * operator -> () const {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return buffer_entry_->data;
            }

            //! Pre-increment: advance to the next logical index, or to end()
            //! when past the last valid entry.
            BufferIterator & operator++() {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                if(isValid()) {
                    uint32_t idx = buffer_entry_->physical_idx;
                    ++idx;
                    if(attached_buffer_->isValid(idx)) {
                        buffer_entry_ = attached_buffer_->buffer_map_[idx];
                    }
                    else {
                        buffer_entry_ = nullptr;
                    }
                } else {
                    // NOTE(review): incrementing an already-invalid iterator
                    // only asserts that the buffer has free space -- confirm
                    // this is the intended guard against walking past end().
                    sparta_assert(attached_buffer_->numFree() > 0, "Incrementing the iterator to entry that is not valid");
                }
                return *this;
            }

            //! Post-increment (returns the pre-increment position).
            BufferIterator operator++ (int) {
                BufferIterator buf_iter(*this);
                operator++();
                return buf_iter;
            }

            //! Pre-decrement: step back one logical index; from end() moves
            //! to the last valid entry.  Decrementing begin() underflows idx
            //! and trips the underrun assert below.
            BufferIterator & operator-- ()
            {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                if(isValid()) {
                    uint32_t idx = buffer_entry_->physical_idx;
                    --idx;
                    if(attached_buffer_->isValid(idx)) {
                        buffer_entry_ = attached_buffer_->buffer_map_[idx];
                    }
                    else {
                        sparta_assert(idx < attached_buffer_->capacity(), "Decrementing the iterator results in buffer underrun");
                        buffer_entry_ = nullptr;
                    }
                }
                else if (attached_buffer_->size()) {
                    buffer_entry_ = attached_buffer_->buffer_map_[attached_buffer_->size()-1];
                }
                return *this;
            }

            //! Post-decrement (returns the pre-decrement position).
            BufferIterator operator-- (int) {
                BufferIterator buf_iter(*this);
                operator--();
                return buf_iter;
            }

            //! Allow iterator/const_iterator to see each other's internals.
            friend class BufferIterator<true>;
        };
326
327 public:
328
330 typedef BufferIterator<false> iterator;
331
333 typedef BufferIterator<true> const_iterator;
334
336 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
337
339 typedef std::reverse_iterator<iterator> reverse_iterator;
340
378 Buffer(const std::string & name,
379 const uint32_t num_entries,
380 const Clock * clk,
381 StatisticSet * statset = nullptr,
386
388 Buffer(const Buffer<value_type> & ) = delete;
389
392
395
397 ~Buffer() { clear(); }
398
        /*!
         * \brief Name of this resource
         * \return The name supplied at construction
         */
        const std::string & getName() const {
            return name_;
        }
405
412 bool isValid(uint32_t idx) const {
413 return idx < size();
414 }
415
        /*!
         * \brief Read and return the data at the given index, const reference
         * \param idx Logical index; caller must ensure isValid(idx)
         */
        const value_type & read(uint32_t idx) const {
            return *(buffer_map_[idx]->data);
        }

        /*!
         * \brief Read the entry at the BufferIterator's location
         * \param entry Iterator created by this Buffer
         */
        const value_type & read(const const_iterator & entry) const
        {
            return read(entry.getIndex_());
        }

        /*!
         * \brief Read the entry at the reverse iterator's location.
         * NOTE(review): this resolves via entry.base().getIndex_(), i.e. the
         * element one past the one the reverse_iterator conventionally
         * denotes -- confirm this is the intended access convention.
         */
        const value_type & read(const const_reverse_iterator & entry) const
        {
            return read(entry.base().getIndex_());
        }

        /*!
         * \brief Read and return the data at the given index, mutable reference
         * \param idx Logical index; caller must ensure isValid(idx)
         */
        value_type & access(uint32_t idx) {
            return *(buffer_map_[idx]->data);
        }

        //! Mutable access at the iterator's location.
        value_type & access(const const_iterator & entry) {
            return access(entry.getIndex_());
        }

        //! Mutable access at the reverse iterator's location (same base()
        //! convention as the reverse read() overload above).
        value_type & access(const const_reverse_iterator & entry) {
            return access(entry.base().getIndex_());
        }

        /*!
         * \brief Read and return the newest (highest-index) entry, mutable reference
         */
        value_type & accessBack() {
            sparta_assert(isValid(num_valid_ - 1));
            return *(buffer_map_[num_valid_ - 1]->data);
        }
480
        /*!
         * \brief Return the fixed number of entries this Buffer can hold
         */
        size_type capacity() const {
            return num_entries_;
        }

        /*!
         * \brief Return the number of valid (occupied) entries
         */
        size_type size() const {
            return num_valid_;
        }

        /*!
         * \brief Return the number of free entries remaining
         */
        size_type numFree() const {
            return capacity() - size();
        }
506
        /*!
         * \brief Append a copy of dat to the end of the Buffer
         * \return iterator to the newly appended entry
         */
        iterator push_back(const value_type& dat)
        {
            return push_backImpl_(dat);
        }

        /*!
         * \brief Move dat onto the end of the Buffer
         * \return iterator to the newly appended entry
         */
        iterator push_back(value_type&& dat)
        {
            return push_backImpl_(std::move(dat));
        }

        /*!
         * \brief Insert a copy of dat BEFORE logical index idx
         * \return iterator to the newly inserted entry
         */
        iterator insert(uint32_t idx, const value_type& dat)
        {
            return insertImpl_(idx, dat);
        }

        /*!
         * \brief Move dat into the Buffer BEFORE logical index idx
         * \return iterator to the newly inserted entry
         */
        iterator insert(uint32_t idx, value_type&& dat)
        {
            return insertImpl_(idx, std::move(dat));
        }

        //! Insert a copy before the given iterator's position.
        iterator insert(const const_iterator & entry, const value_type& dat)
        {
            return insert(entry.getIndex_(), dat);
        }

        //! Move-insert before the given iterator's position.
        iterator insert(const const_iterator & entry, value_type&& dat)
        {
            return insert(entry.getIndex_(), std::move(dat));
        }

        //! Insert a copy before the given reverse iterator's base position.
        iterator insert(const const_reverse_iterator & entry, const value_type& dat)
        {
            return insert(entry.base().getIndex_(), dat);
        }

        //! Move-insert before the given reverse iterator's base position.
        iterator insert(const const_reverse_iterator & entry, value_type&& dat)
        {
            return insert(entry.base().getIndex_(), std::move(dat));
        }
604
        /*!
         * \brief Erase the entry at the given logical index immediately.
         * \param idx Index of a currently valid entry
         *
         * Destroys the object, returns its slot to the head of the free
         * list, and collapses all higher entries down one position so the
         * Buffer stays dense (buffer_map_ and address_map_ move in
         * lock-step).
         */
        void erase(const uint32_t& idx)
        {
            // Make sure we are invalidating an already valid object.
            sparta_assert(idx < size(), "Cannot erase an index that is not already valid");

            // Do the invalidation immediately
            // 1. Move the free space pointer to the erased position.
            // 2. Call the DataT's destructor
            // 3. Set the current free space pointer's next to the old free position
            DataPointer* oldFree = free_position_;
            free_position_ = buffer_map_[idx];
            free_position_->data->~value_type();
            free_position_->next_free = oldFree;

            // Mark DataPointer as invalid
            validator_->detachDataPointer(free_position_);

            // Shift all the positions above the invalidation in the map one space down.
            uint32_t i = idx;
            sparta_assert(num_valid_ > 0);
            const uint32_t top_idx_of_buffer = num_valid_ - 1;
            while(i < top_idx_of_buffer)
            {
                // assert that we are not going to do an invalid read.
                sparta_assert(i + 1 < num_entries_);
                buffer_map_[i] = buffer_map_[i + 1];
                buffer_map_[i]->physical_idx = i;

                // Shift the indexes in the address map.
                address_map_[i] = address_map_[i + 1];
                ++i;
            }

            // the entry at the old num_valid_ in the map now points to nullptr
            buffer_map_[top_idx_of_buffer] = nullptr;

            // Remove this entry of the address map as it becomes a free position.
            address_map_.erase(top_idx_of_buffer);

            // update counts.
            --num_valid_;
            updateUtilizationCounters_();
        }
662
        /*!
         * \brief Erase the entry at the iterator's position.
         * \param entry Iterator that must have been created by this Buffer
         */
        void erase(const const_iterator& entry)
        {
            sparta_assert(entry.attached_buffer_ == this, "Cannot erase an entry created by another Buffer");
            // erase the index in the actual buffer.
            erase(entry.getIndex_());
        }

        /*!
         * \brief Erase the entry at the reverse iterator's base position.
         * \param entry Reverse iterator that must have been created by this Buffer
         */
        void erase(const const_reverse_iterator& entry)
        {
            sparta_assert(entry.base().attached_buffer_ == this, "Cannot erase an entry created by another Buffer");
            // erase the index in the actual buffer.
            erase(entry.base().getIndex_());
        }
684
689 void clear()
690 {
691 num_valid_ = 0;
692 std::for_each(buffer_map_.begin(), buffer_map_.end(),
693 [] (auto map_entry)
694 {
695 if(map_entry) {
696 map_entry->data->~value_type();
697 }
698 });
699 std::fill(buffer_map_.begin(), buffer_map_.end(), nullptr);
700 for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
701 data_pool_[i].next_free = &data_pool_[i + 1];
702 }
703 data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];
704 free_position_ = &data_pool_[0];
705 first_position_ = &data_pool_[0];
706 validator_->clear();
707 address_map_.clear();
708 updateUtilizationCounters_();
709 }
710
715 bool empty() const
716 {
717 return num_valid_ == 0;
718 }
719
        /*!
         * \brief Request that this Buffer begin collecting its contents for
         *        pipeline collection.
         * \param parent TreeNode under which the IterableCollector is created
         */
        void enableCollection(TreeNode * parent) {
            collector_.
                reset(new collection::IterableCollector<Buffer<DataT> >(parent, getName(),
                                                                        this, capacity()));
        }
734
740 if(size()) {
741 sparta_assert(buffer_map_[0]);
742 return iterator(this, buffer_map_[0]);
743 }
744 return end();
745 }
746
751 iterator end() { return iterator(this, nullptr);}
752
758 if(size()) {
759 return const_iterator(this, buffer_map_[0]);
760 }
761 return end();
762 }
763
768 const_iterator end() const { return const_iterator(this, nullptr);}
769
775 return reverse_iterator(end());
776 }
777
783
789 return const_reverse_iterator(end());
790 }
791
797
        /*!
         * \brief Allow the Buffer to grow beyond its initial capacity.
         * \param resize_delta Number of entries added on each internal resize
         */
        void makeInfinite(const uint32_t resize_delta = 1) {
            is_infinite_mode_ = true;
            resize_delta_ = resize_delta;
        }
808
809 private:
810
811 typedef std::vector<DataPointer> DataPool;
812 typedef std::vector<DataPointer*> PointerList;
813
        /*!
         * \brief Tracks which DataPointer slots currently hold live objects.
         *
         * Iterators consult this to decide validity, so an iterator into an
         * erased slot reports invalid rather than dereferencing a destroyed
         * object.
         */
        struct DataPointerValidator
        {
            // Points at the owning Buffer's data_pool_; kept as a raw pointer
            // so it can be re-aimed after the pool is resized.
            const DataPool * data_pool_;
            std::vector<uint32_t> validator_;  // 1 == slot holds a live object

            // Pool index of dp via pointer arithmetic; dp must point into *data_pool_.
            size_type getIndex_(const DataPointer * dp)const {
                auto i = (dp - &(*data_pool_)[0]);
                return static_cast<size_type>(i);
            }

            // Sized to num_entries_ (not the 2x pool size): only the first
            // num_entries_ slots are ever attached in bounded mode.
            DataPointerValidator(const Buffer &b):
                data_pool_(&b.data_pool_),
                validator_(b.num_entries_, 0)
            {}

            //! Mark the slot as holding a live object.
            void attachDataPointer(const DataPointer* dp){
                validator_[getIndex_(dp)] = 1;
            }

            //! Does the slot hold a live object?
            bool isValid(const DataPointer * dp) const {
                return bool(validator_[getIndex_(dp)]);
            }

            //! Mark the slot as empty.
            void detachDataPointer(DataPointer * dp) {
                validator_[getIndex_(dp)] = 0;
            }

            //! Invalidate every slot.
            void clear() {
                std::fill(validator_.begin(), validator_.end(), 0);
            }

            // Grow the valid-bit vector and re-aim at the (reallocated) pool.
            // NOTE(review): sizes against validator_.capacity() rather than
            // size(); when capacity exceeds size this grows by more than
            // resize_delta -- confirm it stays in sync with num_entries_.
            void resizeIteratorValidator(const uint32_t resize_delta,
                                         const DataPool & data_pool) {
                validator_.resize(validator_.capacity() + resize_delta);
                data_pool_ = &data_pool;
            }
        };
857
        //! Push the current occupancy into the utilization histogram, if one
        //! was created at construction.
        void updateUtilizationCounters_() {
            // Update Counters
            if(utilization_) {
                utilization_->setValue(num_valid_);
            }
        }
864
        /*!
         * \brief Grow the internal containers when an infinite-mode Buffer
         *        is full, then relink every raw pointer invalidated by the
         *        vector reallocations.
         *
         * No-op while free slots remain.  Only legal in infinite mode.
         */
        void resizeInternalContainers_() {

            // Assert that the Buffer class is in Infinite-Mode.
            sparta_assert(is_infinite_mode_, "The Buffer class must be in Infinite-Mode in order to resize itself.");

            // We do not resize if there are available slots in buffer.
            if(numFree() != 0) {
                return;
            }

            // Resize the buffer_map_ with the amount provided by user.
            buffer_map_.resize(buffer_map_.capacity() + resize_delta_);

            // The number of entries the buffer can hold is its capacity.
            // NOTE(review): capacity() may exceed the size just established
            // by resize(); if so, num_entries_ can exceed buffer_map_.size()
            // and later indexing below size() assumptions -- confirm
            // capacity() (vs size()) is intended here and for
            // data_pool_size_ below.
            num_entries_ = buffer_map_.capacity();

            // Resize the data_pool_ to twice the capacity of the buffer_map_.
            data_pool_.resize(num_entries_ * 2);

            // The number of entries the pool can hold is its capacity.
            data_pool_size_ = data_pool_.capacity();


            // Each entry in data_pool_ should have their next free position
            // pointer point to the slot to its right.
            for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
                data_pool_[i].next_free = &data_pool_[i + 1];
            }

            // The last entry should point to itself as the next free position.
            data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];

            // The first position should point to the first entry.
            first_position_ = &data_pool_[0];

            // The free position should point to the location according to
            // the number of entries in the buffer.
            free_position_ = &data_pool_[num_valid_];

            // Make all the pointers in buffer_map_ point to the appropriate
            // indexes, using the logical-position -> pool-offset records in
            // address_map_ (the old pointers died with the reallocation).
            for(uint32_t i = 0; i < num_valid_; ++i) {
                buffer_map_[i] = &data_pool_[address_map_[i]];
            }

            // Resize the validator vector and relink the validator data pool.
            validator_->resizeIteratorValidator(resize_delta_, data_pool_);
        }
918
        /*!
         * \brief Implementation of push_back: append dat at logical index size().
         * \param dat Object to copy or move into the Buffer (forwarded)
         * \return iterator to the newly appended entry
         */
        template<typename U>
        iterator push_backImpl_(U&& dat)
        {

            // Check to see if the vectors need to be resized and relinked.
            if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
                resizeInternalContainers_();
            }
            sparta_assert(numFree(), "Buffer exhausted");
            sparta_assert(free_position_ != nullptr);
            // Construct the object in the free slot and stamp its logical index.
            free_position_->allocate(std::forward<U>(dat));
            free_position_->physical_idx = num_valid_;

            // Create the entry to be returned.
            iterator entry(this, free_position_);

            // Do the append now. We can do this with different logic
            // that does not require a process.
            buffer_map_[num_valid_] = free_position_;

            // Store the index in the data_pool_ to which current
            // free_position_ points to. We need to relink all these
            // pointers once the data_pool_ resizes.
            address_map_[num_valid_] =
                static_cast<uint32_t>(free_position_ - &data_pool_[0]);
            //Mark this data pointer as valid
            validator_->attachDataPointer(free_position_);
            ++num_valid_;
            // Pop the free list head.
            free_position_ = free_position_->next_free;
            updateUtilizationCounters_();

            return entry;
        }
952
953 template<typename U>
954 iterator insertImpl_(uint32_t idx, U&& dat)
955 {
956 // Check to see if the vectors need to be resized and relinked.
957 if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
958 resizeInternalContainers_();
959 }
960 sparta_assert(numFree(), "Buffer '" << getName() << "' exhausted");
961 sparta_assert(idx <= num_valid_, "Buffer '" << getName()
962 << "': Cannot insert before a non valid index");
963 sparta_assert(free_position_ != nullptr);
964 free_position_->allocate(std::forward<U>(dat));
965 free_position_->physical_idx = idx;
966
967 //Mark this data pointer as valid
968 validator_->attachDataPointer(free_position_);
969
970 // Create the entry to be returned.
971 iterator entry(this, free_position_);
972
973 //Shift all the positions above idx in the map one space down.
974 uint32_t i = num_valid_;
975 while(i > idx)
976 {
977 //assert that we are not going to do an invalid read.
978 buffer_map_[i] = buffer_map_[i - 1];
979 buffer_map_[i]->physical_idx = i ;
980
981 // Shift the indexes in the map.
982 address_map_[i] = address_map_[i - 1];
983 --i;
984 }
985
986 buffer_map_[idx] = free_position_;
987
988 // Store the index in the data_pool_ to which current
989 // free_position_ points to. We need to relink all these
990 // pointers once the data_pool_ resizes.
991 address_map_[num_valid_] =
992 static_cast<uint32_t>(free_position_ - &data_pool_[0]);
993 ++num_valid_;
994 free_position_ = free_position_->next_free;
995 updateUtilizationCounters_();
996 return entry;
997 }
998
999 std::string name_;
1000 const Clock * clk_ = nullptr;
1001 size_type num_entries_;
1002 PointerList buffer_map_;
1003 size_type data_pool_size_;
1004 DataPool data_pool_;
1006 DataPointer* free_position_ = 0;
1007 DataPointer* first_position_ = 0;
1008 size_type num_valid_ = 0;
1009 std::unique_ptr<DataPointerValidator> validator_;
1012 // Counters
1013 std::unique_ptr<sparta::CycleHistogramStandalone> utilization_;
1014
1016 // Collectors
1017 std::unique_ptr<collection::IterableCollector<Buffer<value_type> > > collector_;
1018
1020 // The behaviour of these methods change accordingly.
1021 bool is_infinite_mode_ {false};
1022
1024 // The additional amount of entries the vector must allocate when resizing.
1026
1028 std::unordered_map<uint32_t, uint32_t> address_map_;
1029 };
1030
1032 // Definitions...
1033 template<class DataT>
1034 Buffer<DataT>::Buffer(const std::string & name,
1035 uint32_t num_entries,
1036 const Clock * clk,
1037 StatisticSet * statset,
1038 InstrumentationNode::visibility_t stat_vis_general,
1039 InstrumentationNode::visibility_t stat_vis_detailed,
1041 InstrumentationNode::visibility_t stat_vis_avg) :
1042 name_(name),
1043 clk_(clk),
1044 num_entries_(num_entries),
1045 data_pool_size_(num_entries* 2)
1046 {
1047 if((num_entries > 0) && statset)
1048 {
1049 utilization_.reset(new CycleHistogramStandalone(statset, clk_,
1050 name_ + "_utilization",
1051 name_ + " occupancy histogram",
1052 0, num_entries, 1, 0,
1053 stat_vis_general,
1054 stat_vis_detailed,
1055 stat_vis_max,
1056 stat_vis_avg));
1057 }
1058
1059 buffer_map_.resize(num_entries_);
1060 data_pool_.resize(data_pool_size_);
1061
1062 // Must set the validator before you clear
1063 validator_.reset(new DataPointerValidator(*this));
1064 clear();
1065 }
1066
1068 template<typename DataT>
1070 name_(std::move(rval.name_)),
1071 clk_(rval.clk_),
1072 num_entries_(rval.num_entries_),
1073 buffer_map_(std::move(rval.buffer_map_)),
1074 data_pool_size_(rval.data_pool_size_),
1075 data_pool_(std::move(rval.data_pool_)),
1076 free_position_(rval.free_position_),
1077 first_position_(rval.first_position_),
1078 num_valid_(rval.num_valid_),
1079 validator_(new DataPointerValidator(*this)),
1080 utilization_(std::move(rval.utilization_)),
1081 collector_(std::move(rval.collector_)),
1082 is_infinite_mode_(rval.is_infinite_mode_),
1083 resize_delta_(std::move(rval.resize_delta_)),
1084 address_map_(std::move(rval.address_map_)){
1085 rval.clk_ = nullptr;
1086 rval.num_entries_ = 0;
1087 rval.data_pool_size_ = 0;
1088 rval.free_position_ = nullptr;
1089 rval.first_position_ = nullptr;
1090 rval.num_valid_ = 0;
1091 rval.utilization_ = nullptr;
1092 rval.collector_ = nullptr;
1093 validator_->validator_ = std::move(rval.validator_->validator_);
1094 if(collector_) {
1095 collector_->reattach(this);
1096 }
1097 }
1098}
CycleHistogram implementation using sparta CycleCounter.
Defines a few handy (and now deprecated) C++ iterator traits.
Contains a collection implementation of various compile-time metaprogramming and Type-Detection APIs ...
Set of macros for Sparta assertions. Caught by the framework.
#define sparta_assert(...)
Simple variadic assertion that will throw a sparta_exception if the condition fails.
#define SPARTA_EXPECT_FALSE(x)
A macro for hinting to the compiler a particular condition should be considered most likely false.
Contains a statistic definition (some useful information which can be computed)
Contains a StatisticInstance which refers to a StatisticDef or Counter and some local state to comput...
A data structure allowing appending at the end, beginning, or middle, but erase anywhere with collaps...
Definition Buffer.hpp:74
iterator insert(uint32_t idx, const value_type &dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:553
void clear()
Empty the contents of the Buffer.
Definition Buffer.hpp:689
size_type numFree() const
Return the number of free entries.
Definition Buffer.hpp:503
const std::string & getName() const
Name of this resource.
Definition Buffer.hpp:402
Buffer(Buffer< value_type > &&)
Move Constructor to allow moves.
Definition Buffer.hpp:1069
const value_type & read(const const_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:430
iterator insert(const const_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:588
std::reverse_iterator< iterator > reverse_iterator
Typedef for the mutable (non-const) reverse iterator.
Definition Buffer.hpp:339
const_reverse_iterator rend() const
Returns a const_reverse_iterator referring to the reverse past-the-end position (just before the first element) of the Buffer container.
Definition Buffer.hpp:796
void erase(const uint32_t &idx)
erase a position in the Buffer immediately.
Definition Buffer.hpp:619
Buffer(const std::string &name, const uint32_t num_entries, const Clock *clk, StatisticSet *statset=nullptr, InstrumentationNode::visibility_t stat_vis_general=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_detailed=InstrumentationNode::VIS_HIDDEN, InstrumentationNode::visibility_t stat_vis_max=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_avg=InstrumentationNode::AUTO_VISIBILITY)
Construct a buffer.
Definition Buffer.hpp:1034
size_type size() const
Return the number of valid entries.
Definition Buffer.hpp:493
bool empty() const
Query if the buffer is empty.
Definition Buffer.hpp:715
const_iterator begin() const
Get the const_iterator pointing to the begin of Buffer.
Definition Buffer.hpp:757
iterator push_back(const value_type &dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:516
reverse_iterator rend()
Returns a reverse_iterator referring to the reverse past-the-end position (just before the first element) of the Buffer container.
Definition Buffer.hpp:782
BufferIterator< true > const_iterator
Typedef for constant iterator.
Definition Buffer.hpp:333
void makeInfinite(const uint32_t resize_delta=1)
Makes the Buffer grow beyond its capacity. The buffer grows by adding new entries in its internal vec...
Definition Buffer.hpp:804
iterator begin()
Get the iterator pointing to the beginning of Buffer.
Definition Buffer.hpp:739
value_type & access(const const_reverse_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:467
iterator push_back(value_type &&dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:530
const_iterator end() const
Returns a const_iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:768
Buffer & operator=(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
const value_type & read(uint32_t idx) const
Read and return the data at the given index, const reference.
Definition Buffer.hpp:421
iterator insert(const const_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:582
const value_type & read(const const_reverse_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:439
reverse_iterator rbegin()
Get a reverse_iterator referring to the last (reverse-beginning) element of the Buffer.
Definition Buffer.hpp:774
iterator insert(const const_reverse_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:600
value_type & access(const const_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:459
value_type & accessBack()
Read and return the data at the bottom of the Buffer.
Definition Buffer.hpp:476
std::reverse_iterator< const_iterator > const_reverse_iterator
Typedef for the constant reverse iterator.
Definition Buffer.hpp:336
bool isValid(uint32_t idx) const
Determine if data at the index is valid.
Definition Buffer.hpp:412
iterator insert(uint32_t idx, value_type &&dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:576
size_type capacity() const
Return the fixed size of this buffer.
Definition Buffer.hpp:485
void enableCollection(TreeNode *parent)
Request that this queue begin collecting its contents for pipeline collection.
Definition Buffer.hpp:729
Buffer(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
iterator insert(const const_reverse_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:594
value_type & access(uint32_t idx)
Read and return the data at the given index, reference.
Definition Buffer.hpp:450
void erase(const const_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:667
BufferIterator< false > iterator
Typedef for regular iterator.
Definition Buffer.hpp:330
const_reverse_iterator rbegin() const
Get a const_reverse_iterator referring to the last (reverse-beginning) element of the Buffer.
Definition Buffer.hpp:788
void erase(const const_reverse_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:678
iterator end()
Returns an iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:751
~Buffer()
Clear (and destruct the Buffer's contents)
Definition Buffer.hpp:397
A representation of simulated time.
Definition Clock.hpp:51
CycleHistogramStandalone class for uint64_t values.
uint32_t visibility_t
Continuous visibility level. Several key points along continum are indicated within Visibility.
@ VIS_HIDDEN
Hidden hint. Lowest possible visibility.
static constexpr visibility_t AUTO_VISIBILITY
The default sparta resource visibility value that should be used. This is an alias of VIS_MAX at the ...
Set of StatisticDef and CounterBase-derived objects for visibility through a sparta Tree.
Node in a composite tree representing a sparta Tree item.
Definition TreeNode.hpp:205
A collector of any iterable type (std::vector, std::list, sparta::Buffer, etc)
Provides a wrapper around a value to ensure that the value is assigned.
Macros for handling exponential backoff.