artdaq_core  v3_02_01
ConcurrentQueue.hh
1 #ifndef artdaq_core_Core_ConcurrentQueue_hh
2 #define artdaq_core_Core_ConcurrentQueue_hh
3 
4 #include <algorithm>
5 #include <cstddef>
6 #include <exception>
7 #include <limits>
8 #include <list>
9 
10 #include <iostream> // debugging
11 #include "tracemf.h" // TRACE - note: no #define TRACE_NAME in .hh files
12 
13 #include <chrono>
14 #include <condition_variable>
15 #include <mutex>
16 #include <type_traits>
17 
18 
19 // #include <boost/date_time/posix_time/posix_time_types.hpp>
20 // #include <boost/utility/enable_if.hpp>
21 // #include <boost/thread/condition.hpp>
22 // #include <boost/thread/mutex.hpp>
23 // #include <boost/thread/xtime.hpp>
24 
25 namespace artdaq
26 {
51  namespace detail
52  {
62  typedef std::chrono::duration<double> seconds;
63 
64  typedef size_t MemoryType;
65 
71  template <typename T>
72  class hasMemoryUsed
73  {
74  typedef char TrueType;
75 
76  struct FalseType
77  {
78  TrueType _[2];
79  };
80 
81  template <MemoryType(T::*)() const>
82  struct TestConst;
83 
84  template <typename C>
85  static TrueType test(TestConst<&C::memoryUsed>*) { return 0; }
86 
87  template <typename C>
88  static FalseType test(...)
89  {
90  return {};
91  }
92 
93  public:
94 
100  static const bool value = (sizeof(test<T>(nullptr)) == sizeof(TrueType));
101  };
102 
109  template <typename T>
110  MemoryType
111  memoryUsage(const std::pair<T, size_t>& t)
112  {
113  MemoryType usage(0UL);
114  try
115  {
116  usage = t.first.memoryUsed();
117  }
118  catch (...) {}
119  return usage;
120  }
121 
128  template <typename T>
129  typename std::enable_if<hasMemoryUsed<T>::value, MemoryType>::type
130  memoryUsage(const T& t)
131  {
132  MemoryType usage(0UL);
133  try
134  {
135  usage = t.memoryUsed();
136  }
137  catch (...) {}
138  return usage;
139  }
140 
147  template <typename T>
148  typename std::enable_if<!hasMemoryUsed<T>::value, MemoryType>::type
149  memoryUsage(const T& t)
150  {
151  return sizeof(t);
152  }
153  }// end namespace detail
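
The helpers above use SFINAE so that detail::memoryUsage() measures a type through its own memoryUsed() const method when one exists, and falls back to sizeof otherwise. The following sketch is illustrative only (it is not part of ConcurrentQueue.hh); the type names WithFootprint and PlainType and the include path are assumptions.

#include "artdaq-core/Core/ConcurrentQueue.hh"  // assumed install path

#include <cstddef>
#include <vector>

// A type that reports its own footprint via memoryUsed() const.
struct WithFootprint
{
	std::vector<char> payload;
	size_t memoryUsed() const { return payload.size(); }
};

// A type without memoryUsed(); memoryUsage() falls back to sizeof.
struct PlainType
{
	double values[4];
};

void memoryUsageExamples()
{
	WithFootprint a;
	a.payload.resize(1024);
	PlainType b{};

	artdaq::detail::MemoryType ua = artdaq::detail::memoryUsage(a);  // 1024, via a.memoryUsed()
	artdaq::detail::MemoryType ub = artdaq::detail::memoryUsage(b);  // sizeof(PlainType)
	(void)ua;
	(void)ub;
}
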
154 
155 
160  template <class T>
161  struct FailIfFull
162  {
163  typedef bool ReturnType;
164 
165  typedef T ValueType;
166  typedef std::list<T> SequenceType;
167  typedef typename SequenceType::size_type SizeType;
168 
172  static struct QueueIsFull : public std::exception
173  {
178  virtual const char* what() const throw()
179  {
180  return "Cannot add item to a full queue";
181  }
182  } queueIsFull;
183 
193  static void doInsert
194  (
195  T const& item,
196  SequenceType& elements,
197  SizeType& size,
198  detail::MemoryType const& itemSize,
199  detail::MemoryType& used,
200  std::condition_variable& nonempty
201  )
202  {
203  elements.push_back(item);
204  ++size;
205  used += itemSize;
206  nonempty.notify_one();
207  }
208 
222  static ReturnType doEnq
223  (
224  T const& item,
225  SequenceType& elements,
226  SizeType& size,
227  SizeType& capacity,
228  detail::MemoryType& used,
229  detail::MemoryType& memory,
230  size_t& elementsDropped,
231  std::condition_variable& nonempty
232  )
233  {
234  detail::MemoryType itemSize = detail::memoryUsage(item);
235  if (size >= capacity || used + itemSize > memory)
236  {
237  ++elementsDropped;
238  throw queueIsFull;
239  }
240  else
241  {
242  doInsert(item, elements, size, itemSize, used, nonempty);
243  }
244  return true;
245  }
246  };
247 
248  template <typename T>
249  typename FailIfFull<T>::QueueIsFull FailIfFull<T>::queueIsFull;
250 
255  template <class T>
256  struct KeepNewest
257  {
258  typedef std::pair<T, size_t> ValueType;
259  typedef std::list<T> SequenceType;
260  typedef typename SequenceType::size_type SizeType;
261  typedef SizeType ReturnType;
262 
272  static void doInsert
273  (
274  T const& item,
275  SequenceType& elements,
276  SizeType& size,
277  detail::MemoryType const& itemSize,
278  detail::MemoryType& used,
279  std::condition_variable& nonempty
280  )
281  {
282  elements.push_back(item);
283  ++size;
284  used += itemSize;
285  nonempty.notify_one();
286  }
287 
300  static ReturnType doEnq
301  (
302  T const& item,
303  SequenceType& elements,
304  SizeType& size,
305  SizeType& capacity,
306  detail::MemoryType& used,
307  detail::MemoryType& memory,
308  size_t& elementsDropped,
309  std::condition_variable& nonempty
310  )
311  {
312  SizeType elementsRemoved(0);
313  detail::MemoryType itemSize = detail::memoryUsage(item);
314  while ((size == capacity || used + itemSize > memory) && !elements.empty())
315  {
316  SequenceType holder;
317  // Move the item out of elements in a manner that will not throw.
318  holder.splice(holder.begin(), elements, elements.begin());
319  // Record the change in the length of elements.
320  --size;
321  used -= detail::memoryUsage(holder.front());
322  ++elementsRemoved;
323  }
324  if (size < capacity && used + itemSize <= memory)
325  // we succeeded in making enough room for the new element
326  {
327  doInsert(item, elements, size, itemSize, used, nonempty);
328  }
329  else
330  {
331  // we cannot add the new element
332  ++elementsRemoved;
333  }
334  elementsDropped += elementsRemoved;
335  return elementsRemoved;
336  }
337  };
338 
343  template <class T>
344  struct RejectNewest
345  {
346  typedef std::pair<T, size_t> ValueType;
347  typedef std::list<T> SequenceType;
348  typedef typename SequenceType::size_type SizeType;
349  typedef SizeType ReturnType;
350 
360  static void doInsert
361  (
362  T const& item,
363  SequenceType& elements,
364  SizeType& size,
365  detail::MemoryType const& itemSize,
366  detail::MemoryType& used,
367  std::condition_variable& nonempty
368  )
369  {
370  elements.push_back(item);
371  ++size;
372  used += itemSize;
373  nonempty.notify_one();
374  }
375 
388  static ReturnType doEnq
389  (
390  T const& item,
391  SequenceType& elements,
392  SizeType& size,
393  SizeType& capacity,
394  detail::MemoryType& used,
395  detail::MemoryType& memory,
396  size_t& elementsDropped,
397  std::condition_variable& nonempty
398  )
399  {
400  detail::MemoryType itemSize = detail::memoryUsage(item);
401  if (size < capacity && used + itemSize <= memory)
402  {
403  doInsert(item, elements, size, itemSize, used, nonempty);
404  return 0;
405  }
406  ++elementsDropped;
407  return 1;
408  }
409  };
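
The three policies differ only in what doEnq does when the queue is full: FailIfFull throws QueueIsFull, KeepNewest evicts the oldest elements to make room, and RejectNewest drops the new item. The sketch below (not part of the header) exercises each through the ConcurrentQueue class template declared next; the Item type, the capacity of one, and the include path are illustrative assumptions.

#include "artdaq-core/Core/ConcurrentQueue.hh"  // assumed install path

#include <cstddef>
#include <exception>
#include <utility>

// Hypothetical payload type; memoryUsed() lets the queue's memory accounting see it.
struct Item
{
	int id;
	size_t memoryUsed() const { return sizeof(*this); }
};

void policyExamples()
{
	// Capacity of one element, so every second enqueue hits the "full" path.
	artdaq::ConcurrentQueue<Item, artdaq::FailIfFull<Item>> failQ(1);
	failQ.enqNowait(Item{1});
	try
	{
		failQ.enqNowait(Item{2});  // full: throws FailIfFull<Item>::QueueIsFull
	}
	catch (std::exception const& e)
	{
		// e.what() == "Cannot add item to a full queue"
	}

	artdaq::ConcurrentQueue<Item, artdaq::KeepNewest<Item>> newestQ(1);
	newestQ.enqNowait(Item{1});
	newestQ.enqNowait(Item{2});  // full: evicts Item{1}, keeps Item{2}, returns 1 (dropped count)
	std::pair<Item, size_t> out;
	newestQ.deqNowait(out);      // out.first.id == 2, out.second == 1 element dropped since last dequeue

	artdaq::ConcurrentQueue<Item, artdaq::RejectNewest<Item>> rejectQ(1);
	rejectQ.enqNowait(Item{1});
	rejectQ.enqNowait(Item{2});  // full: Item{2} is discarded, returns 1 (dropped count)
}
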
410 
414  template <class T, class EnqPolicy = FailIfFull<T>>
415  class ConcurrentQueue
416  {
417  public:
418  typedef typename EnqPolicy::ValueType ValueType;
419  typedef typename EnqPolicy::SequenceType SequenceType;
420  typedef typename SequenceType::size_type SizeType;
421 
428  explicit ConcurrentQueue
429  (
430  SizeType maxSize = std::numeric_limits<SizeType>::max(),
431  detail::MemoryType maxMemory = std::numeric_limits<detail::MemoryType>::max()
432  );
433 
440  ~ConcurrentQueue();
441 
458  typename EnqPolicy::ReturnType enqNowait(T const& item);
459 
468  void enqWait(T const& item);
469 
482  bool enqTimedWait(T const& item, detail::seconds const& wait);
483 
496  bool deqNowait(ValueType& item);
497 
508  void deqWait(ValueType& item);
509 
523  bool deqTimedWait(ValueType& item, detail::seconds const& wait);
524 
529  bool empty() const;
530 
535  bool full() const;
536 
542  SizeType size() const;
543 
549  SizeType capacity() const;
550 
558  bool setCapacity(SizeType capacity);
559 
564  detail::MemoryType used() const;
565 
571  detail::MemoryType memory() const;
572 
581  bool setMemory(detail::MemoryType maxMemory);
582 
588  SizeType clear();
589 
594  void addExternallyDroppedEvents(SizeType dropped);
595 
600  bool queueReaderIsReady() const { return readerReady_; }
601 
609  void setReaderIsReady(bool rdy = true)
610  {
611  readyTime_ = std::chrono::steady_clock::now();
612  readerReady_ = rdy;
613  }
614 
619  std::chrono::steady_clock::time_point getReadyTime() const { return readyTime_; }
620 
621  private:
622  typedef std::lock_guard<std::mutex> LockType;
623  typedef std::unique_lock<std::mutex> WaitLockType;
624 
625  mutable std::mutex protectElements_;
626  mutable std::condition_variable queueNotEmpty_;
627  mutable std::condition_variable queueNotFull_;
628 
629  std::chrono::steady_clock::time_point readyTime_;
630  bool readerReady_;
631  SequenceType elements_;
632  SizeType capacity_;
633  SizeType size_;
634  /*
635  N.B.: we rely on SizeType *not* being some synthesized large
636  type, so that reading the value is an atomic action, as is
637  incrementing or decrementing the value. We do *not* assume that
638  there is any atomic getAndIncrement or getAndDecrement
639  operation.
640  */
641  detail::MemoryType memory_;
642  detail::MemoryType used_;
643  size_t elementsDropped_;
644 
645  /*
646  These private member functions assume that whatever locks
647  necessary for safe operation have already been obtained.
648  */
649 
650  /*
651  Insert the given item into the list, if it is not already full,
652  and increment size. Return true if the item is inserted, and
653  false if not.
654  */
655  bool insertIfPossible(T const& item);
656 
670  bool removeHeadIfPossible(ValueType& item);
671 
683  void removeHead(ValueType& item);
684 
685  void assignItem(T& item, const T& element);
686 
687  void assignItem(std::pair<T, size_t>& item, const T& element);
688 
689  /*
690  Return false if the queue can accept new entries.
691  */
692  bool isFull() const;
693 
694  /*
695  These functions are declared private and not implemented to
696  prevent their use.
697  */
698  ConcurrentQueue(ConcurrentQueue<T, EnqPolicy> const&) = delete;
699 
700  ConcurrentQueue& operator=(ConcurrentQueue<T, EnqPolicy> const&) = delete;
701  };
702 
703  //------------------------------------------------------------------
704  // Implementation follows
705  //------------------------------------------------------------------
706 
707  template <class T, class EnqPolicy>
708  ConcurrentQueue<T, EnqPolicy>::ConcurrentQueue
709  (
710  SizeType maxSize,
711  detail::MemoryType maxMemory
712  ) :
713  protectElements_()
714  , readyTime_(std::chrono::steady_clock::now())
715  , readerReady_(false)
716  , elements_()
717  , capacity_(maxSize)
718  , size_(0)
719  , memory_(maxMemory)
720  , used_(0)
721  , elementsDropped_(0) {}
722 
723  template <class T, class EnqPolicy>
724  ConcurrentQueue<T, EnqPolicy>::~ConcurrentQueue()
725  {
726  LockType lock(protectElements_);
727  elements_.clear();
728  size_ = 0;
729  used_ = 0;
730  elementsDropped_ = 0;
731  }
732 
733 
734  // enqueue methods - 3 - enqNowait, enqWait, enqTimedWait
735 
736  template <class T, class EnqPolicy>
737  typename EnqPolicy::ReturnType ConcurrentQueue<T, EnqPolicy>::enqNowait(T const& item)
738  {
739  TLOG(12,"ConcurrentQueue") << "enqNowait enter size=" << size_ << " capacity=" << capacity_ << " used=" << used_ << " memory=" << memory_ ;
740  LockType lock(protectElements_);
741  auto retval = EnqPolicy::doEnq(item, elements_, size_, capacity_, used_, memory_,
742  elementsDropped_, queueNotEmpty_);
743  TLOG(12,"ConcurrentQueue") << "enqNowait returning " << retval ;
744  return retval;
745  }
746 
747  template <class T, class EnqPolicy>
748  void ConcurrentQueue<T, EnqPolicy>::enqWait(T const& item)
749  {
750  TLOG(13,"ConcurrentQueue") << "enqWait enter" ;
751  WaitLockType lock(protectElements_);
752  while (isFull()) { queueNotFull_.wait(lock); }
753  EnqPolicy::doInsert(item, elements_, size_,
754  detail::memoryUsage(item), used_, queueNotEmpty_);
755  TLOG(13,"ConcurrentQueue") << "enqWait returning" ;
756  }
757 
758  template <class T, class EnqPolicy>
759  bool ConcurrentQueue<T, EnqPolicy>::enqTimedWait(T const& item, detail::seconds const& waitTime)
760  {
761  TLOG(14,"ConcurrentQueue") << "ConcurrentQueue<T,EnqPolicy>::enqTimedWait enter with waitTime=" << std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count() << " ms size=" << size_
762  << " capacity=" << capacity_ << " used=" << used_ << " memory=" << memory_ ;
763  WaitLockType lock(protectElements_);
764  if (isFull())
765  {
766  queueNotFull_.wait_for(lock, waitTime);
767  }
768  bool retval = insertIfPossible(item);
769  TLOG(14,"ConcurrentQueue") << "ConcurrentQueue<T,EnqPolicy>::enqTimedWait returning " << retval ;
770  return retval;
771  }
772 
773 
774  // dequeue methods - 3 - deqNowait, deqWait, deqTimedWait
775 
776  template <class T, class EnqPolicy>
777  bool ConcurrentQueue<T, EnqPolicy>::deqNowait(ValueType& item)
778  {
779  TLOG(15,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqNowait enter" ;
780  LockType lock(protectElements_);
781  bool retval = removeHeadIfPossible(item);
782  TLOG(15,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqNowait returning " << retval ;
783  return retval;
784  }
785 
786  template <class T, class EnqPolicy>
787  void ConcurrentQueue<T, EnqPolicy>::deqWait(ValueType& item)
788  {
789  TLOG(16,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqWait enter" ;
790  WaitLockType lock(protectElements_);
791  while (size_ == 0) { queueNotEmpty_.wait(lock); }
792  removeHead(item);
793  TLOG(16,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqWait returning" ;
794  }
795 
796  template <class T, class EnqPolicy>
797  bool ConcurrentQueue<T, EnqPolicy>::deqTimedWait(ValueType& item, detail::seconds const& waitTime)
798  {
799  TLOG(17,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqTimedWait enter with waitTime=" << std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count() << " ms size=" << size_ ;
800  WaitLockType lock(protectElements_);
801  if (size_ == 0)
802  {
803  queueNotEmpty_.wait_for(lock, waitTime);
804  }
805  bool retval = removeHeadIfPossible(item);
806  TLOG(17,"ConcurrentQueue") << "ConcurrentQueue<T, EnqPolicy>::deqTimedWait returning " << retval << " size=" << size_ ;
807  return retval;
808  }
809 
810 
811  template <class T, class EnqPolicy>
812  bool
813  ConcurrentQueue<T, EnqPolicy>::empty() const
814  {
815  // No lock is necessary: the read is atomic.
816  return size_ == 0;
817  }
818 
819  template <class T, class EnqPolicy>
820  bool
821  ConcurrentQueue<T, EnqPolicy>::full() const
822  {
823  LockType lock(protectElements_);
824  return isFull();
825  }
826 
827  template <class T, class EnqPolicy>
828  typename ConcurrentQueue<T, EnqPolicy>::SizeType
829  ConcurrentQueue<T, EnqPolicy>::size() const
830  {
831  // No lock is necessary: the read is atomic.
832  return size_;
833  }
834 
835  template <class T, class EnqPolicy>
836  typename ConcurrentQueue<T, EnqPolicy>::SizeType
837  ConcurrentQueue<T, EnqPolicy>::capacity() const
838  {
839  // No lock is necessary: the read is atomic.
840  return capacity_;
841  }
842 
843  template <class T, class EnqPolicy>
844  bool
845  ConcurrentQueue<T, EnqPolicy>::setCapacity(SizeType newcapacity)
846  {
847  LockType lock(protectElements_);
848  bool isEmpty = (size_ == 0);
849  if (isEmpty) { capacity_ = newcapacity; }
850  return isEmpty;
851  }
852 
853  template <class T, class EnqPolicy>
854  detail::MemoryType
855  ConcurrentQueue<T, EnqPolicy>::used() const
856  {
857  // No lock is necessary: the read is atomic.
858  return used_;
859  }
860 
861  template <class T, class EnqPolicy>
862  detail::MemoryType
863  ConcurrentQueue<T, EnqPolicy>::memory() const
864  {
865  // No lock is necessary: the read is atomic.
866  return memory_;
867  }
868 
869  template <class T, class EnqPolicy>
870  bool
871  ConcurrentQueue<T, EnqPolicy>::setMemory(detail::MemoryType newmemory)
872  {
873  LockType lock(protectElements_);
874  bool isEmpty = (size_ == 0);
875  if (isEmpty) { memory_ = newmemory; }
876  return isEmpty;
877  }
878 
879  template <class T, class EnqPolicy>
880  typename ConcurrentQueue<T, EnqPolicy>::SizeType
881  ConcurrentQueue<T, EnqPolicy>::clear()
882  {
883  LockType lock(protectElements_);
884  SizeType clearedEvents = size_;
885  elementsDropped_ += size_;
886  elements_.clear();
887  size_ = 0;
888  used_ = 0;
889  return clearedEvents;
890  }
891 
892  template <class T, class EnqPolicy>
893  void
894  ConcurrentQueue<T, EnqPolicy>::addExternallyDroppedEvents(SizeType n)
895  {
896  LockType lock(protectElements_);
897  elementsDropped_ += n;
898  }
899 
900  //-----------------------------------------------------------
901  // Private member functions
902  //-----------------------------------------------------------
903 
904  template <class T, class EnqPolicy>
905  bool
906  ConcurrentQueue<T, EnqPolicy>::insertIfPossible(T const& item)
907  {
908  if (isFull())
909  {
910  ++elementsDropped_;
911  return false;
912  }
913  else
914  {
915  EnqPolicy::doInsert(item, elements_, size_,
916  detail::memoryUsage(item), used_, queueNotEmpty_);
917  return true;
918  }
919  }
920 
921  template <class T, class EnqPolicy>
922  bool
923  ConcurrentQueue<T, EnqPolicy>::removeHeadIfPossible(ValueType& item)
924  {
925  if (size_ == 0) { return false; }
926  removeHead(item);
927  return true;
928  }
929 
930  template <class T, class EnqPolicy>
931  void
932  ConcurrentQueue<T, EnqPolicy>::removeHead(ValueType& item)
933  {
934  SequenceType holder;
935  // Move the item out of elements_ in a manner that will not throw.
936  holder.splice(holder.begin(), elements_, elements_.begin());
937  // Record the change in the length of elements_.
938  --size_;
939  queueNotFull_.notify_one();
940  assignItem(item, holder.front());
941  used_ -= detail::memoryUsage(item);
942  }
943 
944  template <class T, class EnqPolicy>
945  void
946  ConcurrentQueue<T, EnqPolicy>::assignItem(T& item, const T& element)
947  {
948  item = element;
949  }
950 
951  template <class T, class EnqPolicy>
952  void
953  ConcurrentQueue<T, EnqPolicy>::assignItem(std::pair<T, size_t>& item, const T& element)
954  {
955  item.first = element;
956  item.second = elementsDropped_;
957  elementsDropped_ = 0;
958  }
959 
960  template <class T, class EnqPolicy>
961  bool
962  ConcurrentQueue<T, EnqPolicy>::isFull() const
963  {
964  if (size_ >= capacity_ || used_ >= memory_) { return true; }
965  return false;
966  }
967 } // namespace artdaq
968 
969 #endif /* artdaq_core_Core_ConcurrentQueue_hh */
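
A minimal producer/consumer usage sketch, assuming the header installs as "artdaq-core/Core/ConcurrentQueue.hh"; the thread structure, element counts, and capacity are illustrative, not taken from artdaq.

#include "artdaq-core/Core/ConcurrentQueue.hh"  // assumed install path

#include <iostream>
#include <thread>

int main()
{
	// Bounded queue of at most 100 ints; with the default FailIfFull policy,
	// enqNowait would throw when full, so the producer uses the blocking enqWait.
	artdaq::ConcurrentQueue<int> queue(100);
	queue.setReaderIsReady();  // advisory flag only; queried via queueReaderIsReady()

	std::thread producer([&queue] {
		for (int i = 0; i < 1000; ++i)
		{
			queue.enqWait(i);  // blocks while the queue is full
		}
	});

	std::thread consumer([&queue] {
		int value = 0;
		int received = 0;
		while (received < 1000)
		{
			// Wait up to one second for an item; deqTimedWait returns false on timeout.
			if (queue.deqTimedWait(value, artdaq::detail::seconds(1.0)))
			{
				++received;
			}
		}
	});

	producer.join();
	consumer.join();
	std::cout << "items left in queue: " << queue.size() << std::endl;
	return 0;
}
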