The Sparta Modeling Framework
Loading...
Searching...
No Matches
Buffer.hpp
Go to the documentation of this file.
1// <Buffer.h> -*- C++ -*-
2
3
10#pragma once
11
#include <cinttypes>
#include <cstddef>
#include <cstring>
#include <algorithm>
#include <iterator>
#include <memory>
#include <string>
#include <type_traits>
#include <unordered_map>
#include <vector>

#include "sparta/collection/IterableCollector.hpp"
#include "sparta/statistics/Counter.hpp"
25
26namespace sparta
27{
28
72 template <class DataT>
73 class Buffer
74 {
75 public:
76
77 // A typedef for this Buffer's type. This is useful for my
78 // subclasses, BufferIterator & EntryValidator
80
81 // Typedef for the DataT
82 typedef DataT value_type;
83
84 // Typedef for size_type
85 typedef uint32_t size_type;
86
87 private:
88
        /**
         * \brief A pool slot holding raw, aligned storage for one value_type.
         *
         * The object is constructed lazily via placement-new in allocate()
         * and destroyed explicitly by the Buffer (erase()/clear()); the
         * DataPointer's own destructor intentionally does NOT destroy the
         * stored object.  Slots are chained into a free list via next_free.
         */
        struct DataPointer {
        private:
            // Raw storage; `data` points here once an object is constructed
            alignas(value_type) std::byte object_memory_[sizeof(value_type)];

        public:
            DataPointer() { }
            ~DataPointer() { }  // does not destroy the stored object (owner: Buffer)

            // Move: relocate the stored bytes and repoint `data` at our own
            // storage.  NOTE(review): this memcpy assumes the contained
            // DataT tolerates bytewise relocation (used when data_pool_ is
            // resized); next_free/physical_idx are not copied and `data` is
            // set even if the source slot held no object — the Buffer
            // relinks all of these after a pool resize.  Confirm against
            // resizeInternalContainers_().
            DataPointer(DataPointer &&orig) noexcept {
                ::memcpy(&object_memory_, &orig.object_memory_, sizeof(object_memory_));
                data = reinterpret_cast<value_type*>(&object_memory_);
            }

            // No copies, only moves
            DataPointer(const DataPointer &) = delete;

            // Construct a value_type in-place from `dat` (copy or move,
            // chosen by perfect forwarding)
            template<typename U>
            void allocate(U && dat) {
                data = new (&object_memory_) value_type(std::forward<U>(dat));
            }

            value_type * data = nullptr;        // live object, or stale/null when unallocated
            DataPointer* next_free = nullptr;   // free-list link maintained by Buffer
            uint32_t physical_idx = 0;          // logical position within buffer_map_
        };
121 //Forward Declaration
122 struct DataPointerValidator;
123
        /**
         * \brief Bidirectional iterator over a Buffer's contents.
         *
         * Templated on constness so one implementation serves both
         * iterator (is_const_iterator == false) and const_iterator
         * (is_const_iterator == true).  An iterator stores the Buffer that
         * minted it plus the DataPointer slot it designates; a null slot
         * represents end().  Validity is tracked externally by the Buffer's
         * DataPointerValidator, so erased entries invalidate iterators.
         */
        template <bool is_const_iterator = true>
        class BufferIterator : public utils::IteratorTraits<std::bidirectional_iterator_tag, value_type>
        {
        private:
            friend class Buffer<value_type>;
            // Reference/pointer types that pick up const-ness from the flag
            typedef typename std::conditional<is_const_iterator,
                                              const value_type &, value_type &>::type DataReferenceType;
            typedef typename std::conditional<is_const_iterator,
                                              const BufferType *, BufferType *>::type BufferPointerType;
            typedef typename std::conditional<is_const_iterator,
                                              const DataPointer *, DataPointer *>::type DataPointerType;

            /// Logical index of this iterator in the Buffer.  end() (null
            /// entry) maps to capacity(), which orders after any valid index.
            uint32_t getIndex_() const {
                if(buffer_entry_ == nullptr) {
                    return attached_buffer_->capacity();
                }
                return buffer_entry_->physical_idx;
            }

            /// The Buffer that created this iterator (null if default constructed)
            BufferPointerType attached_buffer_ = nullptr;

            /// The designated slot; nullptr == end()
            DataPointerType buffer_entry_ = nullptr;

            /// Private constructor used by Buffer to mint iterators
            BufferIterator(BufferPointerType buffer, DataPointerType entry) :
                attached_buffer_(buffer),
                buffer_entry_(entry)
            {}


        public:

            /// Default: an unattached, invalid iterator
            BufferIterator() = default;

            /// Converting constructor: iterator -> const_iterator
            BufferIterator(const BufferIterator<false> & iter) :
                attached_buffer_(iter.attached_buffer_),
                buffer_entry_(iter.buffer_entry_)
            {}

            /// Copy constructor (const flavor)
            BufferIterator(const BufferIterator<true> & iter) :
                attached_buffer_(iter.attached_buffer_),
                buffer_entry_(iter.buffer_entry_)
            {}

            /// Assignment
            BufferIterator& operator=(const BufferIterator&) = default;

            /// Order by logical index; both sides must come from the same Buffer
            bool operator<(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_,
                              "Cannot compare BufferIterators created by different buffers.");
                return getIndex_() < rhs.getIndex_();
            }

            /// Order by logical index; both sides must come from the same Buffer
            bool operator>(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_,
                              "Cannot compare BufferIterators created by different buffers.");
                return getIndex_() > rhs.getIndex_();
            }

            /// Equality is identity of the designated slot
            bool operator==(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_,
                              "Cannot compare BufferIterators created by different buffers.");
                return (buffer_entry_ == rhs.buffer_entry_);
            }

            /// Inequality (negation of operator==)
            bool operator!=(const BufferIterator& rhs) const
            {
                sparta_assert(attached_buffer_ == rhs.attached_buffer_,
                              "Cannot compare BufferIterators created by different buffers.");
                return !operator==(rhs);
            }

            /// True when this iterator designates a live (non-erased) entry
            bool isValid() const
            {
                if(buffer_entry_ != nullptr) {
                    return attached_buffer_->validator_->isValid(buffer_entry_);
                }
                return false;
            }

            /// Dereference; asserts attachment and validity
            DataReferenceType operator* () const {
                sparta_assert(attached_buffer_,
                              "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return *(buffer_entry_->data);
            }

            /// Member access; asserts attachment and validity
            value_type * operator -> () {
                sparta_assert(attached_buffer_,
                              "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return buffer_entry_->data;
            }

            /// Member access (const); asserts attachment and validity
            value_type const * operator -> () const {
                sparta_assert(attached_buffer_,
                              "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Iterator is not valid for dereferencing");
                return buffer_entry_->data;
            }

            /// Pre-increment: step to the next logical index, or end()
            BufferIterator & operator++() {
                sparta_assert(attached_buffer_,
                              "The iterator is not attached to a buffer. Was it initialized?");
                sparta_assert(isValid(), "Incrementing an iterator that is not valid");
                const uint32_t idx = buffer_entry_->physical_idx + 1;
                if(attached_buffer_->isValid(idx)) {
                    buffer_entry_ = attached_buffer_->buffer_map_[idx];
                }
                else {
                    buffer_entry_ = nullptr;  // walked off the end
                }
                return *this;
            }

            /// Post-increment (returns the pre-step copy)
            BufferIterator operator++ (int) {
                BufferIterator buf_iter(*this);
                operator++();
                return buf_iter;
            }

            /// Pre-decrement: step back one index; decrementing end()
            /// yields the last valid entry
            BufferIterator & operator-- ()
            {
                sparta_assert(attached_buffer_, "The iterator is not attached to a buffer. Was it initialized?");
                if(isValid()) {
                    uint32_t idx = buffer_entry_->physical_idx;
                    --idx;  // unsigned wrap past 0 is caught by the assert below
                    if(attached_buffer_->isValid(idx)) {
                        buffer_entry_ = attached_buffer_->buffer_map_[idx];
                    }
                    else {
                        sparta_assert(idx < attached_buffer_->capacity(), "Decrementing the iterator results in buffer underrun");
                        buffer_entry_ = nullptr;
                    }
                }
                else if (attached_buffer_->size()) {
                    buffer_entry_ = attached_buffer_->buffer_map_[attached_buffer_->size()-1];
                }
                return *this;
            }

            /// Post-decrement (returns the pre-step copy)
            BufferIterator operator-- (int) {
                BufferIterator buf_iter(*this);
                operator--();
                return buf_iter;
            }

            /// Allow the const flavor to read the non-const flavor's privates
            friend class BufferIterator<true>;
        };
331
332 public:
333
335 typedef BufferIterator<false> iterator;
336
338 typedef BufferIterator<true> const_iterator;
339
341 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
342
344 typedef std::reverse_iterator<iterator> reverse_iterator;
345
383 Buffer(const std::string & name,
384 const uint32_t num_entries,
385 const Clock * clk,
386 StatisticSet * statset = nullptr,
391
393 Buffer(const Buffer<value_type> & ) = delete;
394
397
400
402 ~Buffer() { clear(); }
403
        /**
         * \brief Name of this resource.
         * \return const reference to the name given at construction
         */
        const std::string & getName() const {
            return name_;
        }
410
        /**
         * \brief Determine if data at the index is valid.
         * \param idx Logical index into the Buffer
         * \return true iff idx addresses a currently occupied entry
         */
        bool isValid(uint32_t idx) const {
            return idx < size();
        }
420
426 const value_type & read(uint32_t idx) const {
428 return *(buffer_map_[idx]->data);
429 }
430
        /**
         * \brief Read the entry at the BufferIterator's location.
         * \param entry Iterator created by this Buffer
         * \return const reference to the stored data
         */
        const value_type & read(const const_iterator & entry) const
        {
            return read(entry.getIndex_());
        }

        /**
         * \brief Read the entry at the reverse iterator's location.
         * \param entry Reverse iterator created by this Buffer
         * \return const reference to the stored data
         *
         * std::prev(entry.base()) converts the reverse iterator to the
         * forward iterator addressing the same element.
         */
        const value_type & read(const const_reverse_iterator & entry) const
        {
            return read(std::prev(entry.base()));
        }
448
455 value_type & access(uint32_t idx) {
457 return *(buffer_map_[idx]->data);
458 }
459
        /**
         * \brief Mutable access to the entry at the iterator's location.
         * \param entry Iterator created by this Buffer
         * \return mutable reference to the stored data
         */
        value_type & access(const const_iterator & entry) {
            return access(entry.getIndex_());
        }

        /**
         * \brief Mutable access via a reverse iterator.
         * \param entry Reverse iterator created by this Buffer
         * \return mutable reference to the stored data
         */
        value_type & access(const const_reverse_iterator & entry) {
            return access(std::prev(entry.base()));
        }
475
        /**
         * \brief Read and return the newest (highest-index) entry.
         * \return mutable reference to the entry at index size()-1
         *
         * Asserts when the buffer is empty (num_valid_ == 0).
         */
        value_type & accessBack() {
            sparta_assert(isValid(num_valid_ - 1));
            return *(buffer_map_[num_valid_ - 1]->data);
        }
485
        /**
         * \brief Return the size of this buffer (grows in infinite mode).
         */
        size_type capacity() const {
            return num_entries_;
        }

        /**
         * \brief Return the number of valid (occupied) entries.
         */
        size_type size() const {
            return num_valid_;
        }

        /**
         * \brief Return the number of free entries (capacity - size).
         */
        size_type numFree() const {
            return capacity() - size();
        }
511
        /**
         * \brief Append a copy of \a dat to the end of the Buffer.
         * \return iterator to the appended entry
         */
        iterator push_back(const value_type& dat)
        {
            return push_backImpl_(dat);
        }

        /**
         * \brief Move \a dat onto the end of the Buffer.
         * \return iterator to the appended entry
         */
        iterator push_back(value_type&& dat)
        {
            return push_backImpl_(std::move(dat));
        }
539
        /**
         * \brief Insert a copy of \a dat BEFORE the given index.
         * \return iterator to the inserted entry
         */
        iterator insert(uint32_t idx, const value_type& dat)
        {
            return insertImpl_(idx, dat);
        }

        /**
         * \brief Move \a dat into the Buffer BEFORE the given index.
         * \return iterator to the inserted entry
         */
        iterator insert(uint32_t idx, value_type&& dat)
        {
            return insertImpl_(idx, std::move(dat));
        }

        //! Insert (copy) before a BufferIterator; see insert(idx, dat)
        iterator insert(const const_iterator & entry, const value_type& dat)
        {
            return insert(entry.getIndex_(), dat);
        }

        //! Insert (move) before a BufferIterator; see insert(idx, dat)
        iterator insert(const const_iterator & entry, value_type&& dat)
        {
            return insert(entry.getIndex_(), std::move(dat));
        }

        //! Insert (copy) before a reverse iterator; see insert(idx, dat)
        iterator insert(const const_reverse_iterator & entry, const value_type& dat)
        {
            return insert(entry.base().getIndex_(), dat);
        }

        //! Insert (move) before a reverse iterator; see insert(idx, dat)
        iterator insert(const const_reverse_iterator & entry, value_type&& dat)
        {
            return insert(entry.base().getIndex_(), std::move(dat));
        }
609
        /**
         * \brief Erase a position in the Buffer immediately, collapsing
         *        all higher entries down by one.
         * \param idx Logical index to erase; must satisfy idx < size()
         *
         * O(n) in the entries above idx.  Iterators to the erased entry
         * become invalid (tracked by the validator); iterators to shifted
         * entries continue to follow their data.
         */
        void erase(uint32_t idx)
        {
            // Make sure we are invalidating an already valid object.
            sparta_assert(idx < size(),
                          "Cannot erase an index that is not already valid");

            // Do the invalidation immediately
            // 1. Move the free space pointer to the erased position.
            // 2. Call the DataT's destructor
            // 3. Set the current free space pointer's next to the old free position
            DataPointer* oldFree = free_position_;
            free_position_ = buffer_map_[idx];
            free_position_->data->~value_type();
            free_position_->next_free = oldFree;

            // Mark DataPointer as invalid
            validator_->detachDataPointer(free_position_);

            // Shift all the positions above the invalidation in the map one space down.
            sparta_assert(num_valid_ > 0);
            const uint32_t top_idx_of_buffer = num_valid_ - 1;
            while(idx < top_idx_of_buffer)
            {
                // assert that we are not going to do an invalid read.
                sparta_assert(idx + 1 < num_entries_);
                buffer_map_[idx] = buffer_map_[idx + 1];
                buffer_map_[idx]->physical_idx = idx;

                // Shift the indexes in the address map.
                address_map_[idx] = address_map_[idx + 1];
                ++idx;
            }

            // the entry at the old num_valid_ in the map now points to nullptr
            buffer_map_[top_idx_of_buffer] = nullptr;

            // Remove this entry of the address map as it becomes a free position.
            address_map_.erase(top_idx_of_buffer);

            // update counts.
            --num_valid_;
            updateUtilizationCounters_();
        }
667
673 {
674 sparta_assert(entry.attached_buffer_ == this,
675 "Cannot erase an entry created by another Buffer");
676 // erase the index in the actual buffer.
677 erase(entry.getIndex_());
678 return {this, buffer_map_[entry.getIndex_()]};
679 }
680
686 {
687 return reverse_iterator{erase(std::prev(entry.base()))};
688 }
689
694 void clear()
695 {
696 num_valid_ = 0;
697 std::for_each(buffer_map_.begin(), buffer_map_.end(),
698 [] (auto map_entry)
699 {
700 if(map_entry) {
701 map_entry->data->~value_type();
702 }
703 });
704 std::fill(buffer_map_.begin(), buffer_map_.end(), nullptr);
705 for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
706 data_pool_[i].next_free = &data_pool_[i + 1];
707 }
708 data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];
709 free_position_ = &data_pool_[0];
710 first_position_ = &data_pool_[0];
711 validator_->clear();
712 address_map_.clear();
713 updateUtilizationCounters_();
714 }
715
720 bool empty() const
721 {
722 return num_valid_ == 0;
723 }
724
        /**
         * \brief Request that this Buffer begin collecting its contents
         *        for pipeline collection.
         * \param parent TreeNode under which the collector is created
         */
        void enableCollection(TreeNode * parent) {
            collector_.
                reset(new collection::IterableCollector<Buffer<DataT> >(parent, getName(),
                                                                        this, capacity()));
        }
739
745 if(size()) {
746 sparta_assert(buffer_map_[0]);
747 return iterator(this, buffer_map_[0]);
748 }
749 return end();
750 }
751
756 iterator end() { return iterator(this, nullptr);}
757
763 if(size()) {
764 return const_iterator(this, buffer_map_[0]);
765 }
766 return end();
767 }
768
773 const_iterator end() const { return const_iterator(this, nullptr);}
774
780 return reverse_iterator(end());
781 }
782
788
794 return const_reverse_iterator(end());
795 }
796
802
        /**
         * \brief Make the Buffer grow beyond its capacity on demand.
         * \param resize_delta Number of entries added each time the
         *        internal vectors are grown (default 1)
         *
         * After this call push_back/insert resize the internal containers
         * (see resizeInternalContainers_) whenever the buffer is full.
         */
        void makeInfinite(const uint32_t resize_delta = 1) {
            is_infinite_mode_ = true;
            resize_delta_ = resize_delta;
        }
813
814 private:
815
816 typedef std::vector<DataPointer> DataPool;
817 typedef std::vector<DataPointer*> PointerList;
818
        /**
         * \brief Tracks which data_pool_ slots currently hold live entries.
         *
         * A DataPointer's slot index is recovered by pointer subtraction
         * from the pool base, so the validator must be re-pointed at the
         * pool whenever the pool reallocates (resizeIteratorValidator).
         */
        struct DataPointerValidator
        {
            // data_pool_ of the Buffer class.
            const DataPool * data_pool_;
            // Parallel occupancy flags, one per pool slot (1 = live)
            std::vector<uint32_t> validator_;

            /// Slot index of \a dp within the pool (pointer arithmetic)
            size_type getIndex_(const DataPointer * dp)const {
                auto i = (dp - &(*data_pool_)[0]);
                return static_cast<size_type>(i);
            }

            DataPointerValidator(const Buffer &b):
                data_pool_(&b.data_pool_),
                validator_(b.num_entries_, 0)
            {}

            /// Mark the slot holding \a dp as live
            void attachDataPointer(const DataPointer* dp){
                validator_[getIndex_(dp)] = 1;
            }

            /// True when the slot holding \a dp is live
            bool isValid(const DataPointer * dp) const {
                return bool(validator_[getIndex_(dp)]);
            }

            /// Mark the slot holding \a dp as erased
            void detachDataPointer(DataPointer * dp) {
                validator_[getIndex_(dp)] = 0;
            }

            /// Mark every slot as erased
            void clear() {
                std::fill(validator_.begin(), validator_.end(), 0);
            }

            /**
             * \brief Grow the flag vector and re-point at the (possibly
             *        reallocated) pool after an infinite-mode resize.
             * NOTE(review): sizes from validator_.capacity() + delta,
             * mirroring resizeInternalContainers_'s use of capacity();
             * confirm this intentionally tracks capacity, not size.
             */
            void resizeIteratorValidator(const uint32_t resize_delta,
                                         const DataPool & data_pool) {
                validator_.resize(validator_.capacity() + resize_delta);
                data_pool_ = &data_pool;
            }
        };
862
        /// Push the current occupancy into the utilization histogram, if any
        void updateUtilizationCounters_() {
            // Update Counters
            if(utilization_) {
                utilization_->setValue(num_valid_);
            }
        }
869
        /**
         * \brief Grow the internal containers when an infinite-mode Buffer
         *        is full, then relink every raw pointer.
         *
         * buffer_map_ grows by resize_delta_; data_pool_ is kept at twice
         * the map's capacity.  Because resizing the vectors reallocates,
         * all DataPointer* links (free list, buffer_map_, free_position_,
         * first_position_, validator) are rebuilt from address_map_, which
         * stores stable pool indexes rather than pointers.
         */
        void resizeInternalContainers_() {

            // Assert that the Buffer class is in Infinite-Mode.
            sparta_assert(is_infinite_mode_,
                          "The Buffer class must be in Infinite-Mode in order to resize itself.");

            // We do not resize if there are available slots in buffer.
            if(numFree() != 0) {
                return;
            }

            // Resize the buffer_map_ with the amount provided by user.
            buffer_map_.resize(buffer_map_.capacity() + resize_delta_);

            // The number of entries the buffer can hold is its capacity.
            num_entries_ = buffer_map_.capacity();

            // Resize the data_pool_ to twice the capacity of the buffer_map_.
            data_pool_.resize(num_entries_ * 2);

            // The number of entries the pool can hold is its capacity.
            data_pool_size_ = data_pool_.capacity();


            // Each entry in data_pool_ should have their next free position
            // pointer point to the slot to its right.
            for(uint32_t i = 0; i < data_pool_size_ - 1; ++i) {
                data_pool_[i].next_free = &data_pool_[i + 1];
            }

            // The last entry should point to itself as the next free position.
            data_pool_[data_pool_size_ - 1].next_free = &data_pool_[data_pool_size_ - 1];

            // The first position should point to the first entry.
            first_position_ = &data_pool_[0];

            // The free position should point to the location according to
            // the number of entries in the buffer.
            free_position_ = &data_pool_[num_valid_];

            // Make all the pointers in buffer_map_ point to the appropriate indexes.
            for(uint32_t i = 0; i < num_valid_; ++i) {
                buffer_map_[i] = &data_pool_[address_map_[i]];
            }

            // Resize the validator vector and relink the validator data pool.
            validator_->resizeIteratorValidator(resize_delta_, data_pool_);
        }
924
        /**
         * \brief Shared implementation of both push_back overloads.
         * \param dat Forwarded value to construct at the end of the Buffer
         * \return iterator to the appended entry
         *
         * Takes the head of the free list, placement-constructs the value
         * there, appends it to buffer_map_, and records its pool index in
         * address_map_ so pointers can be rebuilt after a pool resize.
         */
        template<typename U>
        iterator push_backImpl_(U&& dat)
        {

            // Check to see if the vectors need to be resized and relinked.
            if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
                resizeInternalContainers_();
            }
            sparta_assert(numFree(), "Buffer exhausted");
            sparta_assert(free_position_ != nullptr);
            free_position_->allocate(std::forward<U>(dat));
            free_position_->physical_idx = num_valid_;

            // Create the entry to be returned.
            iterator entry(this, free_position_);

            // Do the append now. We can do this with different logic
            // that does not require a process.
            buffer_map_[num_valid_] = free_position_;

            // Store the index in the data_pool_ to which current
            // free_position_ points to. We need to relink all these
            // pointers once the data_pool_ resizes.
            address_map_[num_valid_] =
                static_cast<uint32_t>(free_position_ - &data_pool_[0]);
            //Mark this data pointer as valid
            validator_->attachDataPointer(free_position_);
            ++num_valid_;
            free_position_ = free_position_->next_free;
            updateUtilizationCounters_();

            return entry;
        }
958
        /**
         * \brief Shared implementation of both insert overloads.
         * \param idx Logical position to insert BEFORE; must be <= size()
         * \param dat Forwarded value to construct
         * \return iterator to the inserted entry
         *
         * Constructs the value in the free-list head, then shifts
         * buffer_map_/address_map_ entries at or above idx up by one to
         * make room.  O(n) in the entries above idx.
         */
        template<typename U>
        iterator insertImpl_(uint32_t idx, U&& dat)
        {
            // Check to see if the vectors need to be resized and relinked.
            if(SPARTA_EXPECT_FALSE(is_infinite_mode_)) {
                resizeInternalContainers_();
            }
            sparta_assert(numFree(), "Buffer '" << getName() << "' exhausted");
            sparta_assert(idx <= num_valid_, "Buffer '" << getName()
                          << "': Cannot insert before a non valid index");
            sparta_assert(free_position_ != nullptr);
            free_position_->allocate(std::forward<U>(dat));
            free_position_->physical_idx = idx;

            //Mark this data pointer as valid
            validator_->attachDataPointer(free_position_);

            // Create the entry to be returned.
            iterator entry(this, free_position_);

            //Shift all the positions above idx in the map one space down.
            uint32_t i = num_valid_;
            while(i > idx)
            {
                //assert that we are not going to do an invalid read.
                buffer_map_[i] = buffer_map_[i - 1];
                buffer_map_[i]->physical_idx = i ;

                // Shift the indexes in the map.
                address_map_[i] = address_map_[i - 1];
                --i;
            }

            buffer_map_[idx] = free_position_;

            // Store the index in the data_pool_ to which current
            // free_position_ points to. We need to relink all these
            // pointers once the data_pool_ resizes.
            address_map_[num_valid_] =
                static_cast<uint32_t>(free_position_ - &data_pool_[0]);
            ++num_valid_;
            free_position_ = free_position_->next_free;
            updateUtilizationCounters_();
            return entry;
        }
1004
1005 std::string name_;
1006 const Clock * clk_ = nullptr;
1007 size_type num_entries_ = 0;
1008 PointerList buffer_map_;
1009 size_type data_pool_size_ = 0;
1010 DataPool data_pool_;
1013 DataPointer* free_position_ = nullptr;
1014 DataPointer* first_position_ = nullptr;
1015 size_type num_valid_ = 0;
1016 std::unique_ptr<DataPointerValidator> validator_;
1019 // Counters
1020 std::unique_ptr<sparta::CycleHistogramStandalone> utilization_;
1021
1023 // Collectors
1024 std::unique_ptr<collection::IterableCollector<Buffer<value_type> > > collector_;
1025
1027 // The behaviour of these methods change accordingly.
1028 bool is_infinite_mode_ {false};
1029
1031 // The additional amount of entries the vector must allocate when resizing.
1033
1035 std::unordered_map<uint32_t, uint32_t> address_map_;
1036 };
1037
1039 // Definitions...
1040 template<class DataT>
1041 Buffer<DataT>::Buffer(const std::string & name,
1042 uint32_t num_entries,
1043 const Clock * clk,
1044 StatisticSet * statset,
1045 InstrumentationNode::visibility_t stat_vis_general,
1046 InstrumentationNode::visibility_t stat_vis_detailed,
1048 InstrumentationNode::visibility_t stat_vis_avg) :
1049 name_(name),
1050 clk_(clk),
1051 num_entries_(num_entries),
1052 data_pool_size_(num_entries* 2)
1053 {
1054 if((num_entries > 0) && statset)
1055 {
1056 utilization_.reset(new CycleHistogramStandalone(statset, clk_,
1057 name_ + "_utilization",
1058 name_ + " occupancy histogram",
1059 0, num_entries, 1, 0,
1060 stat_vis_general,
1061 stat_vis_detailed,
1062 stat_vis_max,
1063 stat_vis_avg));
1064 }
1065
1066 buffer_map_.resize(num_entries_);
1067 data_pool_.resize(data_pool_size_);
1068
1069 // Must set the validator before you clear
1070 validator_.reset(new DataPointerValidator(*this));
1071 clear();
1072 }
1073
1075 template<typename DataT>
1077 name_(std::move(rval.name_)),
1078 clk_(rval.clk_),
1079 num_entries_(rval.num_entries_),
1080 buffer_map_(std::move(rval.buffer_map_)),
1081 data_pool_size_(rval.data_pool_size_),
1082 data_pool_(std::move(rval.data_pool_)),
1083 free_position_(rval.free_position_),
1084 first_position_(rval.first_position_),
1085 num_valid_(rval.num_valid_),
1086 validator_(new DataPointerValidator(*this)),
1087 utilization_(std::move(rval.utilization_)),
1088 collector_(std::move(rval.collector_)),
1089 is_infinite_mode_(rval.is_infinite_mode_),
1090 resize_delta_(std::move(rval.resize_delta_)),
1091 address_map_(std::move(rval.address_map_)){
1092 rval.clk_ = nullptr;
1093 rval.num_entries_ = 0;
1094 rval.data_pool_size_ = 0;
1095 rval.free_position_ = nullptr;
1096 rval.first_position_ = nullptr;
1097 rval.num_valid_ = 0;
1098 rval.utilization_ = nullptr;
1099 rval.collector_ = nullptr;
1100 validator_->validator_ = std::move(rval.validator_->validator_);
1101 if(collector_) {
1102 collector_->reattach(this);
1103 }
1104 }
1105}
CycleHistogram implementation using sparta CycleCounter.
Defines a few handy (and now deprecated) C++ iterator traits.
Contains a collection implementation of various compile-time metaprogramming and Type-Detection APIs ...
Set of macros for Sparta assertions. Caught by the framework.
#define sparta_assert(...)
Simple variadic assertion that will throw a sparta_exception if the condition fails.
#define SPARTA_EXPECT_FALSE(x)
A macro for hinting to the compiler a particular condition should be considered most likely false.
Contains a statistic definition (some useful information which can be computed)
Contains a StatisticInstance which refers to a StatisticDef or Counter and some local state to comput...
A data structure allowing appending at the end, beginning, or middle, but erase anywhere with collaps...
Definition Buffer.hpp:74
iterator insert(uint32_t idx, const value_type &dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:558
std::reverse_iterator< iterator > reverse_iterator
Typedef for the (non-const) reverse iterator.
Definition Buffer.hpp:344
void clear()
Empty the contents of the Buffer.
Definition Buffer.hpp:694
size_type numFree() const
Return the number of free entries.
Definition Buffer.hpp:508
std::reverse_iterator< const_iterator > const_reverse_iterator
Typedef for the constant reverse iterator.
Definition Buffer.hpp:341
const std::string & getName() const
Name of this resource.
Definition Buffer.hpp:407
Buffer(Buffer< value_type > &&)
Move Constructor to allow moves.
Definition Buffer.hpp:1076
const value_type & read(const const_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:435
iterator insert(const const_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:593
const_reverse_iterator rend() const
Returns a const_reverse_iterator referring to the element preceding the first element of the Buffer (reverse past-the-end).
Definition Buffer.hpp:801
Buffer(const std::string &name, const uint32_t num_entries, const Clock *clk, StatisticSet *statset=nullptr, InstrumentationNode::visibility_t stat_vis_general=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_detailed=InstrumentationNode::VIS_HIDDEN, InstrumentationNode::visibility_t stat_vis_max=InstrumentationNode::AUTO_VISIBILITY, InstrumentationNode::visibility_t stat_vis_avg=InstrumentationNode::AUTO_VISIBILITY)
Construct a buffer.
Definition Buffer.hpp:1041
void erase(uint32_t idx)
erase a position in the Buffer immediately.
Definition Buffer.hpp:624
size_type size() const
Return the number of valid entries.
Definition Buffer.hpp:498
bool empty() const
Query if the buffer is empty.
Definition Buffer.hpp:720
BufferIterator< true > const_iterator
Typedef for constant iterator.
Definition Buffer.hpp:338
const_iterator begin() const
Get the const_iterator pointing to the begin of Buffer.
Definition Buffer.hpp:762
iterator push_back(const value_type &dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:521
reverse_iterator rend()
Returns a reverse_iterator referring to the element preceding the first element of the Buffer (reverse past-the-end).
Definition Buffer.hpp:787
void makeInfinite(const uint32_t resize_delta=1)
Makes the Buffer grow beyond its capacity. The buffer grows by adding new entries in its internal vec...
Definition Buffer.hpp:809
iterator begin()
Get the iterator pointing to the beginning of Buffer.
Definition Buffer.hpp:744
value_type & access(const const_reverse_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:472
iterator push_back(value_type &&dat)
Append data to the end of Buffer, and return a BufferIterator.
Definition Buffer.hpp:535
const_iterator end() const
Returns a const_iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:773
Buffer & operator=(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
const value_type & read(uint32_t idx) const
Read and return the data at the given index, const reference.
Definition Buffer.hpp:426
iterator insert(const const_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:587
BufferIterator< false > iterator
Typedef for regular iterator.
Definition Buffer.hpp:335
const value_type & read(const const_reverse_iterator &entry) const
read the entry at the BufferIterator's location
Definition Buffer.hpp:444
reverse_iterator rbegin()
Get a reverse_iterator pointing to the last valid element of the Buffer.
Definition Buffer.hpp:779
iterator insert(const const_reverse_iterator &entry, value_type &&dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:605
iterator erase(const const_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:672
value_type & access(const const_iterator &entry)
Read and return the data at the given BufferIterator's location, reference.
Definition Buffer.hpp:464
value_type & accessBack()
Read and return the data at the bottom of the Buffer.
Definition Buffer.hpp:481
bool isValid(uint32_t idx) const
Determine if data at the index is valid.
Definition Buffer.hpp:417
iterator insert(uint32_t idx, value_type &&dat)
Insert the item BEFORE the given index.
Definition Buffer.hpp:581
size_type capacity() const
Return the fixed size of this buffer.
Definition Buffer.hpp:490
void enableCollection(TreeNode *parent)
Request that this queue begin collecting its contents for pipeline collection.
Definition Buffer.hpp:734
reverse_iterator erase(const const_reverse_iterator &entry)
erase the index at which the entry exists in the Buffer.
Definition Buffer.hpp:685
Buffer(const Buffer< value_type > &)=delete
No copies allowed for Buffer.
iterator insert(const const_reverse_iterator &entry, const value_type &dat)
Do an insert before a BufferIterator see insert method above.
Definition Buffer.hpp:599
value_type & access(uint32_t idx)
Read and return the data at the given index, reference.
Definition Buffer.hpp:455
const_reverse_iterator rbegin() const
Get a const_reverse_iterator pointing to the last valid element of the Buffer.
Definition Buffer.hpp:793
iterator end()
Returns an iterator referring to past-the-end element in the Buffer container.
Definition Buffer.hpp:756
~Buffer()
Clear (and destruct the Buffer's contents)
Definition Buffer.hpp:402
A representation of simulated time.
Definition Clock.hpp:44
CycleHistogramStandalone class for uint64_t values.
uint32_t visibility_t
Continuous visibility level. Several key points along continum are indicated within Visibility.
@ VIS_HIDDEN
Hidden hint. Lowest possible visibility.
static constexpr visibility_t AUTO_VISIBILITY
The default sparta resource visibility value that should be used. This is an alias of VIS_MAX at the ...
Set of StatisticDef and CounterBase-derived objects for visibility through a sparta Tree.
Node in a composite tree representing a sparta Tree item.
Definition TreeNode.hpp:205
A collector of any iterable type (std::vector, std::list, sparta::Buffer, etc)
Provides a wrapper around a value to ensure that the value is assigned.
Macros for handling exponential backoff.