artdaq_core  v3_05_07
ConcurrentQueue.hh
#ifndef artdaq_core_Core_ConcurrentQueue_hh
#define artdaq_core_Core_ConcurrentQueue_hh

#include <algorithm>
#include <cstddef>
#include <exception>
#include <limits>
#include <list>

#include <iostream>   // debugging
#include "tracemf.h"  // TRACE - note: no #define TRACE_NAME in .hh files

#include <chrono>
#include <condition_variable>
#include <mutex>
#include <type_traits>

// #include <boost/date_time/posix_time/posix_time_types.hpp>
// #include <boost/utility/enable_if.hpp>
// #include <boost/thread/condition.hpp>
// #include <boost/thread/mutex.hpp>
// #include <boost/thread/xtime.hpp>

namespace artdaq {
namespace detail {
typedef std::chrono::duration<double> seconds;

typedef size_t MemoryType;  ///< Basic unit of data storage and pointer types

/**
 * Uses SFINAE to determine whether the class used to instantiate the
 * ConcurrentQueue template has a memoryUsed() const member function
 * returning MemoryType.
 */
template<typename T>
class hasMemoryUsed
{
    typedef char TrueType;

    struct FalseType
    {
        TrueType _[2];
    };

    template<MemoryType (T::*)() const>
    struct TestConst;

    template<typename C>
    static TrueType test(TestConst<&C::memoryUsed>*)
    {
        return 0;
    }

    template<typename C>
    static FalseType test(...)
    {
        return {};
    }

public:
    static const bool value = (sizeof(test<T>(nullptr)) == sizeof(TrueType));
};

/**
 * Returns the memory used by an object, as reported by its memoryUsed()
 * method; any exception thrown by memoryUsed() is swallowed and 0 is returned.
 */
template<typename T>
MemoryType
memoryUsage(const std::pair<T, size_t>& t)
{
    MemoryType usage(0UL);
    try
    {
        usage = t.first.memoryUsed();
    }
    catch (...)
    {}
    return usage;
}

/**
 * Overload selected when T provides memoryUsed(): report the value it returns.
 */
template<typename T>
typename std::enable_if<hasMemoryUsed<T>::value, MemoryType>::type
memoryUsage(const T& t)
{
    MemoryType usage(0UL);
    try
    {
        usage = t.memoryUsed();
    }
    catch (...)
    {}
    return usage;
}

/**
 * Overload selected when T does not provide memoryUsed(): fall back to sizeof.
 */
template<typename T>
typename std::enable_if<!hasMemoryUsed<T>::value, MemoryType>::type
memoryUsage(const T& t)
{
    return sizeof(t);
}
}  // end namespace detail
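
/*
  Illustrative sketch (not part of the original header), showing which
  memoryUsage() overload is selected. The types TrackedItem and PlainItem
  below are hypothetical:

      struct TrackedItem
      {
          detail::MemoryType memoryUsed() const { return 1024; }
      };
      struct PlainItem
      {
          char buf[64];
      };

      // detail::hasMemoryUsed<TrackedItem>::value is true, so the
      // memoryUsed() overload is chosen and this yields 1024:
      detail::MemoryType a = detail::memoryUsage(TrackedItem());

      // detail::hasMemoryUsed<PlainItem>::value is false, so the sizeof
      // fallback is chosen and this yields sizeof(PlainItem):
      detail::MemoryType b = detail::memoryUsage(PlainItem());
*/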

/**
 * ConcurrentQueue policy to throw an exception when the queue is full.
 */
template<class T>
struct FailIfFull
{
    typedef bool ReturnType;  ///< Type returned by doEnq

    typedef T ValueType;                                ///< Type of values stored in the queue
    typedef std::list<T> SequenceType;                  ///< Type of sequences of items
    typedef typename SequenceType::size_type SizeType;  ///< Size type of sequences

    /**
     * Exception thrown by the FailIfFull policy when an enqueue operation
     * is attempted on a full queue.
     */
    static struct QueueIsFull : public std::exception
    {
        /// Describe exception
        virtual const char* what() const throw()
        {
            return "Cannot add item to a full queue";
        }
    } queueIsFull;  ///< Instance of QueueIsFull exception

    /// Inserts an element into the ConcurrentQueue.
    static void doInsert(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        detail::MemoryType const& itemSize,
        detail::MemoryType& used,
        std::condition_variable& nonempty)
    {
        elements.push_back(item);
        ++size;
        used += itemSize;
        nonempty.notify_one();
    }

    /// Attempts to enqueue an item; throws queueIsFull if there is no room.
    static ReturnType doEnq(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        SizeType& capacity,
        detail::MemoryType& used,
        detail::MemoryType& memory,
        size_t& elementsDropped,
        std::condition_variable& nonempty)
    {
        detail::MemoryType itemSize = detail::memoryUsage(item);
        if (size >= capacity || used + itemSize > memory)
        {
            ++elementsDropped;
            throw queueIsFull;
        }
        else
        {
            doInsert(item, elements, size, itemSize, used, nonempty);
        }
        return true;
    }
};

template<typename T>
class ConcurrentQueue;

/**
 * ConcurrentQueue policy to discard the oldest elements when the queue is full.
 */
template<class T>
struct KeepNewest
{
    typedef std::pair<T, size_t> ValueType;             ///< Type of elements stored in the queue
    typedef std::list<T> SequenceType;                  ///< Type of sequences of items
    typedef typename SequenceType::size_type SizeType;  ///< Size type of sequences
    typedef SizeType ReturnType;                        ///< Type returned by doEnq

    /// Inserts an element into the ConcurrentQueue.
    static void doInsert(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        detail::MemoryType const& itemSize,
        detail::MemoryType& used,
        std::condition_variable& nonempty)
    {
        elements.push_back(item);
        ++size;
        used += itemSize;
        nonempty.notify_one();
    }

    /// Attempts to enqueue an item, discarding the oldest elements as needed
    /// to make room; returns the number of elements dropped.
    static ReturnType doEnq(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        SizeType& capacity,
        detail::MemoryType& used,
        detail::MemoryType& memory,
        size_t& elementsDropped,
        std::condition_variable& nonempty)
    {
        SizeType elementsRemoved(0);
        detail::MemoryType itemSize = detail::memoryUsage(item);
        while ((size == capacity || used + itemSize > memory) && !elements.empty())
        {
            SequenceType holder;
            // Move the item out of elements in a manner that will not throw.
            holder.splice(holder.begin(), elements, elements.begin());
            // Record the change in the length of elements.
            --size;
            used -= detail::memoryUsage(holder.front());
            ++elementsRemoved;
        }
        if (size < capacity && used + itemSize <= memory)
        // we succeeded in making enough room for the new element
        {
            doInsert(item, elements, size, itemSize, used, nonempty);
        }
        else
        {
            // we cannot add the new element
            ++elementsRemoved;
        }
        elementsDropped += elementsRemoved;
        return elementsRemoved;
    }
};

/**
 * ConcurrentQueue policy to discard new elements when the queue is full.
 */
template<class T>
struct RejectNewest
{
    typedef std::pair<T, size_t> ValueType;             ///< Type of elements stored in the queue
    typedef std::list<T> SequenceType;                  ///< Type of sequences of items
    typedef typename SequenceType::size_type SizeType;  ///< Size type of sequences
    typedef SizeType ReturnType;                        ///< Type returned by doEnq

    /// Inserts an element into the ConcurrentQueue.
    static void doInsert(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        detail::MemoryType const& itemSize,
        detail::MemoryType& used,
        std::condition_variable& nonempty)
    {
        elements.push_back(item);
        ++size;
        used += itemSize;
        nonempty.notify_one();
    }

    /// Attempts to enqueue an item; returns the number of elements dropped
    /// (0 on success, 1 if the new item was rejected).
    static ReturnType doEnq(
        T const& item,
        SequenceType& elements,
        SizeType& size,
        SizeType& capacity,
        detail::MemoryType& used,
        detail::MemoryType& memory,
        size_t& elementsDropped,
        std::condition_variable& nonempty)
    {
        detail::MemoryType itemSize = detail::memoryUsage(item);
        if (size < capacity && used + itemSize <= memory)
        {
            doInsert(item, elements, size, itemSize, used, nonempty);
            return 0;
        }
        ++elementsDropped;
        return 1;
    }
};

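/*
  Illustrative sketch (not part of the original header): the EnqPolicy
  template parameter selects the full-queue behavior of the ConcurrentQueue
  declared below. The element type Fragment is hypothetical.

      // Default policy: enqNowait() returns bool and throws
      // FailIfFull<Fragment>::queueIsFull when the queue is full.
      artdaq::ConcurrentQueue<Fragment> q1;

      // Drop the oldest elements to make room; enqNowait() returns the
      // number of elements dropped.
      artdaq::ConcurrentQueue<Fragment, artdaq::KeepNewest<Fragment>> q2;

      // Drop the new element when full; enqNowait() returns 0 or 1.
      artdaq::ConcurrentQueue<Fragment, artdaq::RejectNewest<Fragment>> q3;
*/
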
/**
 * A bounded queue supporting safe concurrent enqueue and dequeue; the
 * EnqPolicy determines what happens when the queue is full.
 */
template<class T, class EnqPolicy = FailIfFull<T>>
class ConcurrentQueue
{
public:
    typedef typename EnqPolicy::ValueType ValueType;        ///< Type of values stored in the ConcurrentQueue
    typedef typename EnqPolicy::SequenceType SequenceType;  ///< Type of sequence used by the ConcurrentQueue
    typedef typename SequenceType::size_type SizeType;      ///< Type for indexes in the sequence

    /// ConcurrentQueue is always bounded. By default, the bound is absurdly large.
    explicit ConcurrentQueue(
        SizeType maxSize = std::numeric_limits<SizeType>::max(),
        detail::MemoryType maxMemory = std::numeric_limits<detail::MemoryType>::max());

    ~ConcurrentQueue();

    /// Add a copy of item to the queue, according to the rules determined by the EnqPolicy.
    typename EnqPolicy::ReturnType enqNowait(T const& item);

    /// Add a copy of item to the queue, waiting as long as necessary for the queue to be non-full.
    void enqWait(T const& item);

    /// Add a copy of item to the queue, waiting up to wait seconds for the queue to be non-full.
    bool enqTimedWait(T const& item, detail::seconds const& wait);

    /// Assign the value at the head of the queue to item and then remove the head of the queue.
    bool deqNowait(ValueType& item);

    /// Assign the value at the head of the queue to item and then remove the head of the queue,
    /// waiting as long as necessary for the queue to be non-empty.
    void deqWait(ValueType& item);

    /// Assign the value at the head of the queue to item and then remove the head of the queue,
    /// waiting up to wait seconds for the queue to be non-empty.
    bool deqTimedWait(ValueType& item, detail::seconds const& wait);

    /// Return true if the queue is empty.
    bool empty() const;

    /// Return true if the queue is full.
    bool full() const;

    /// Return the size of the queue, that is, the number of items it contains.
    SizeType size() const;

    /// Return the capacity of the queue, that is, the maximum number of items it can contain.
    SizeType capacity() const;

    /// Reset the capacity of the queue. This can only be done if the queue is empty.
    bool setCapacity(SizeType capacity);

    /// Return the memory in bytes used by items in the queue.
    detail::MemoryType used() const;

    /// Return the memory of the queue in bytes, that is, the maximum memory the items in the queue may occupy.
    detail::MemoryType memory() const;

    /// Reset the maximum memory in bytes of the queue. This can only be done if the queue is empty.
    bool setMemory(detail::MemoryType maxMemory);

    /// Remove all items from the queue. This changes the size to zero but does not change the capacity.
    SizeType clear();

    /// Adds the passed count to the counter of dropped events.
    void addExternallyDroppedEvents(SizeType dropped);

    /// Is the reader connected and ready for items to appear on the queue?
    bool queueReaderIsReady() const { return readerReady_; }

    /// Set the ready flag for the reader.
    void setReaderIsReady(bool rdy = true)
    {
        readyTime_ = std::chrono::steady_clock::now();
        readerReady_ = rdy;
    }

    /// Gets the time at which the queue became ready.
    std::chrono::steady_clock::time_point getReadyTime() const { return readyTime_; }

private:
    typedef std::lock_guard<std::mutex> LockType;
    typedef std::unique_lock<std::mutex> WaitLockType;

    mutable std::mutex protectElements_;
    mutable std::condition_variable queueNotEmpty_;
    mutable std::condition_variable queueNotFull_;

    std::chrono::steady_clock::time_point readyTime_;
    bool readerReady_;
    SequenceType elements_;
    SizeType capacity_;
    SizeType size_;
    /*
      N.B.: we rely on SizeType *not* being some synthesized large
      type, so that reading the value is an atomic action, as is
      incrementing or decrementing the value. We do *not* assume that
      there is any atomic getAndIncrement or getAndDecrement
      operation.
    */
    detail::MemoryType memory_;
    detail::MemoryType used_;
    size_t elementsDropped_;

    /*
      These private member functions assume that whatever locks
      necessary for safe operation have already been obtained.
    */

    /*
      Insert the given item into the list, if it is not already full,
      and increment size. Return true if the item is inserted, and
      false if not.
    */
    bool insertIfPossible(T const& item);

    /*
      Assign the value at the head of the queue to item and then remove
      the head. Return true if an item was removed and false if the
      queue was empty.
    */
    bool removeHeadIfPossible(ValueType& item);

    /*
      Assign the value at the head of the queue to item and then remove
      the head of the queue.
    */
    void removeHead(ValueType& item);

    void assignItem(T& item, const T& element);

    void assignItem(std::pair<T, size_t>& item, const T& element);

    /*
      Return false if the queue can accept new entries.
    */
    bool isFull() const;

    /*
      The copy constructor and copy assignment operator are deleted to
      prevent their use.
    */
    ConcurrentQueue(ConcurrentQueue<T, EnqPolicy> const&) = delete;

    ConcurrentQueue& operator=(ConcurrentQueue<T, EnqPolicy> const&) = delete;
};
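
/*
  Illustrative usage sketch (not part of the original header), assuming the
  default FailIfFull policy so that ValueType is simply T. The element type
  int and the bounds below are arbitrary.

      artdaq::ConcurrentQueue<int> q(100);  // at most 100 elements

      // Producer thread: block until space is available.
      q.enqWait(42);

      // Consumer thread: wait up to 1 second for an element to appear.
      int value = 0;
      if (q.deqTimedWait(value, artdaq::detail::seconds(1.0)))
      {
          // value now holds the former head of the queue
      }
*/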

//------------------------------------------------------------------
// Implementation follows
//------------------------------------------------------------------

template<class T, class EnqPolicy>
ConcurrentQueue<T, EnqPolicy>::ConcurrentQueue(
    SizeType maxSize,
    detail::MemoryType maxMemory)
    : protectElements_()
    , readyTime_(std::chrono::steady_clock::now())
    , readerReady_(false)
    , elements_()
    , capacity_(maxSize)
    , size_(0)
    , memory_(maxMemory)
    , used_(0)
    , elementsDropped_(0)
{}

template<class T, class EnqPolicy>
ConcurrentQueue<T, EnqPolicy>::~ConcurrentQueue()
{
    LockType lock(protectElements_);
    elements_.clear();
    size_ = 0;
    used_ = 0;
    elementsDropped_ = 0;
}

// enqueue methods - 3 - enqNowait, enqWait, enqTimedWait

template<class T, class EnqPolicy>
typename EnqPolicy::ReturnType ConcurrentQueue<T, EnqPolicy>::enqNowait(T const& item)
{
    TLOG(12, "ConcurrentQueue") << "enqNowait enter size=" << size_ << " capacity=" << capacity_ << " used=" << used_ << " memory=" << memory_;
    LockType lock(protectElements_);
    auto retval = EnqPolicy::doEnq(item, elements_, size_, capacity_, used_, memory_,
                                   elementsDropped_, queueNotEmpty_);
    TLOG(12, "ConcurrentQueue") << "enqNowait returning " << retval;
    return retval;
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::enqWait(T const& item)
{
    TLOG(13, "ConcurrentQueue") << "enqWait enter";
    WaitLockType lock(protectElements_);
    while (isFull()) { queueNotFull_.wait(lock); }
    EnqPolicy::doInsert(item, elements_, size_,
                        detail::memoryUsage(item), used_, queueNotEmpty_);
    TLOG(13, "ConcurrentQueue") << "enqWait returning";
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::enqTimedWait(T const& item, detail::seconds const& waitTime)
{
    TLOG(14, "ConcurrentQueue") << "ConcurrentQueue<T,EnqPolicy>::enqTimedWait enter with waitTime=" << std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count() << " ms size=" << size_
                                << " capacity=" << capacity_ << " used=" << used_ << " memory=" << memory_;
    WaitLockType lock(protectElements_);
    if (isFull())
    {
        queueNotFull_.wait_for(lock, waitTime);
    }
    bool retval = insertIfPossible(item);
    TLOG(14, "ConcurrentQueue") << "ConcurrentQueue<T,EnqPolicy>::enqTimedWait returning " << retval;
    return retval;
}

// dequeue methods - 3 - deqNowait, deqWait, deqTimedWait

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::deqNowait(ValueType& item)
{
    TLOG(15, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqNowait enter";
    LockType lock(protectElements_);
    bool retval = removeHeadIfPossible(item);
    TLOG(15, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqNowait returning " << retval;
    return retval;
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::deqWait(ValueType& item)
{
    TLOG(16, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqWait enter";
    WaitLockType lock(protectElements_);
    while (size_ == 0) { queueNotEmpty_.wait(lock); }
    removeHead(item);
    TLOG(16, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqWait returning";
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::deqTimedWait(ValueType& item, detail::seconds const& waitTime)
{
    TLOG(17, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqTimedWait enter with waitTime=" << std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count() << " ms size=" << size_;
    WaitLockType lock(protectElements_);
    if (size_ == 0)
    {
        queueNotEmpty_.wait_for(lock, waitTime);
    }
    bool retval = removeHeadIfPossible(item);
    TLOG(17, "ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqTimedWait returning " << retval << " size=" << size_;
    return retval;
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::empty() const
{
    // No lock is necessary: the read is atomic.
    return size_ == 0;
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::full() const
{
    LockType lock(protectElements_);
    return isFull();
}

template<class T, class EnqPolicy>
typename ConcurrentQueue<T, EnqPolicy>::SizeType
ConcurrentQueue<T, EnqPolicy>::size() const
{
    // No lock is necessary: the read is atomic.
    return size_;
}

template<class T, class EnqPolicy>
typename ConcurrentQueue<T, EnqPolicy>::SizeType
ConcurrentQueue<T, EnqPolicy>::capacity() const
{
    // No lock is necessary: the read is atomic.
    return capacity_;
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::setCapacity(SizeType newcapacity)
{
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { capacity_ = newcapacity; }
    return isEmpty;
}

template<class T, class EnqPolicy>
detail::MemoryType
ConcurrentQueue<T, EnqPolicy>::used() const
{
    // No lock is necessary: the read is atomic.
    return used_;
}

template<class T, class EnqPolicy>
detail::MemoryType
ConcurrentQueue<T, EnqPolicy>::memory() const
{
    // No lock is necessary: the read is atomic.
    return memory_;
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::setMemory(detail::MemoryType newmemory)
{
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { memory_ = newmemory; }
    return isEmpty;
}

template<class T, class EnqPolicy>
typename ConcurrentQueue<T, EnqPolicy>::SizeType
ConcurrentQueue<T, EnqPolicy>::clear()
{
    LockType lock(protectElements_);
    SizeType clearedEvents = size_;
    elementsDropped_ += size_;
    elements_.clear();
    size_ = 0;
    used_ = 0;
    return clearedEvents;
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::addExternallyDroppedEvents(SizeType n)
{
    LockType lock(protectElements_);
    elementsDropped_ += n;
}

//-----------------------------------------------------------
// Private member functions
//-----------------------------------------------------------

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::insertIfPossible(T const& item)
{
    if (isFull())
    {
        ++elementsDropped_;
        return false;
    }
    else
    {
        EnqPolicy::doInsert(item, elements_, size_,
                            detail::memoryUsage(item), used_, queueNotEmpty_);
        return true;
    }
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::removeHeadIfPossible(ValueType& item)
{
    if (size_ == 0) { return false; }
    removeHead(item);
    return true;
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::removeHead(ValueType& item)
{
    SequenceType holder;
    // Move the item out of elements_ in a manner that will not throw.
    holder.splice(holder.begin(), elements_, elements_.begin());
    // Record the change in the length of elements_.
    --size_;
    queueNotFull_.notify_one();
    assignItem(item, holder.front());
    used_ -= detail::memoryUsage(item);
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::assignItem(T& item, const T& element)
{
    item = element;
}

template<class T, class EnqPolicy>
void ConcurrentQueue<T, EnqPolicy>::assignItem(std::pair<T, size_t>& item, const T& element)
{
    // The pair form also reports, and then resets, the dropped-element count.
    item.first = element;
    item.second = elementsDropped_;
    elementsDropped_ = 0;
}

template<class T, class EnqPolicy>
bool ConcurrentQueue<T, EnqPolicy>::isFull() const
{
    if (size_ >= capacity_ || used_ >= memory_) { return true; }
    return false;
}
}  // namespace artdaq

#endif /* artdaq_core_Core_ConcurrentQueue_hh */
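
/*
  Illustrative sketch (not part of the original header): with the dropping
  policies, ValueType is std::pair<T, size_t>, and assignItem() above stores
  the number of elements dropped since the previous dequeue in the pair's
  second member. The element type Fragment is hypothetical.

      artdaq::ConcurrentQueue<Fragment, artdaq::KeepNewest<Fragment>> q(1000);

      std::pair<Fragment, size_t> entry;
      if (q.deqNowait(entry))
      {
          // entry.first is the dequeued Fragment;
          // entry.second is the count of elements dropped before it.
      }
*/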