artdaq_core v1_07_01
ConcurrentQueue.hh

#ifndef artdaq_core_Core_ConcurrentQueue_hh
#define artdaq_core_Core_ConcurrentQueue_hh

#include <algorithm>
#include <cstddef>
#include <exception>
#include <limits>
#include <list>

#include <iostream> // debugging
#include "trace.h"  // TRACE

#include <chrono>
#include <condition_variable>
#include <mutex>
#include <type_traits>

// #include <boost/date_time/posix_time/posix_time_types.hpp>
// #include <boost/utility/enable_if.hpp>
// #include <boost/thread/condition.hpp>
// #include <boost/thread/mutex.hpp>
// #include <boost/thread/xtime.hpp>

namespace daqrate {
  // We shall use daqrate::seconds as our "standard" duration
  // type. Note that this differs from std::chrono::seconds, which has
  // a representation in some integer type of at least 35 bits.
  //
  // daqrate::seconds dur(1.0) represents a duration of 1 second.
  // daqrate::seconds dur2(0.001) represents a duration of 1
  // millisecond.
  typedef std::chrono::duration<double> seconds;

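  // Illustrative sketch (not part of the original header): because
  // daqrate::seconds uses a double representation, fractional waits can be
  // written directly when calling the timed methods declared below.
  // "queue" and "item" are hypothetical names used only for illustration.
  //
  //   daqrate::seconds halfSecond(0.5);
  //   queue.deqTimedWait(item, halfSecond);               // wait up to 500 ms
  //   queue.enqTimedWait(item, daqrate::seconds(0.001));  // wait up to 1 ms
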
  namespace detail {
    typedef size_t MemoryType;

    /*
      This template uses SFINAE to figure out whether the class used to
      instantiate the ConcurrentQueue template has a method memoryUsed
      returning the number of bytes occupied by the class itself.
    */
    template <typename T>
    class hasMemoryUsed {
      typedef char TrueType;
      struct FalseType { TrueType _[2]; };

      template <MemoryType(T::*)() const>
      struct TestConst;

      template <typename C>
      static TrueType test(TestConst<&C::memoryUsed> *);
      template <typename C>
      static FalseType test(...);

    public:
      static const bool value = (sizeof(test<T>(0)) == sizeof(TrueType));
    };

    template <typename T>
    MemoryType
    memoryUsage(const std::pair<T, size_t> & t)
    {
      MemoryType usage(0UL);
      try {
        usage = t.first.memoryUsed();
      }
      catch (...)
      {}
      return usage;
    }

    template <typename T>
    typename std::enable_if<hasMemoryUsed<T>::value, MemoryType>::type
    memoryUsage(const T & t)
    {
      MemoryType usage(0UL);
      try {
        usage = t.memoryUsed();
      }
      catch (...)
      {}
      return usage;
    }

    template <typename T>
    typename std::enable_if < !hasMemoryUsed<T>::value, MemoryType >::type
    memoryUsage(const T & t)
    { return sizeof(t); }

  } // end namespace detail


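  // Illustrative sketch (not part of the original header): how
  // detail::memoryUsage() resolves for different element types. A type that
  // provides "MemoryType memoryUsed() const" is asked for its own size;
  // anything else falls back to sizeof(). The type names here are
  // hypothetical, for illustration only.
  //
  //   struct Tracked   { detail::MemoryType memoryUsed() const { return 1024; } };
  //   struct Untracked { double payload[4]; };
  //
  //   detail::memoryUsage(Tracked());    // calls Tracked::memoryUsed() -> 1024
  //   detail::memoryUsage(Untracked());  // falls back to sizeof(Untracked)
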
  template <class T>
  struct FailIfFull {
    typedef bool ReturnType;

    typedef T ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;

    static struct QueueIsFull : public std::exception {
      virtual const char * what() const throw() {
        return "Cannot add item to a full queue";
      }
    } queueIsFull;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      detail::MemoryType itemSize = detail::memoryUsage(item);
      if (size >= capacity || used + itemSize > memory) {
        ++elementsDropped;
        throw queueIsFull;
      }
      else {
        doInsert(item, elements, size, itemSize, used, nonempty);
      }
      return true;
    }
  };

  template<typename T>
  typename FailIfFull<T>::QueueIsFull FailIfFull<T>::queueIsFull {};

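  // Illustrative sketch (not part of the original header): with the default
  // FailIfFull policy, enqNowait() throws QueueIsFull rather than discarding
  // anything, so a producer typically guards the call. "queue", "fragment",
  // and "Fragment" are hypothetical names used only for illustration.
  //
  //   try {
  //     queue.enqNowait(fragment);
  //   }
  //   catch (FailIfFull<Fragment>::QueueIsFull const & e) {
  //     // queue was full (by count or by memory); the item was not inserted
  //   }
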
  template <class T>
  struct KeepNewest {
    typedef std::pair<T, size_t> ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;
    typedef SizeType ReturnType;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      SizeType elementsRemoved(0);
      detail::MemoryType itemSize = detail::memoryUsage(item);
      while ((size == capacity || used + itemSize > memory) && !elements.empty()) {
        SequenceType holder;
        // Move the item out of elements in a manner that will not throw.
        holder.splice(holder.begin(), elements, elements.begin());
        // Record the change in the length of elements.
        --size;
        used -= detail::memoryUsage(holder.front());
        ++elementsRemoved;
      }
      if (size < capacity && used + itemSize <= memory)
        // we succeeded in making enough room for the new element
      {
        doInsert(item, elements, size, itemSize, used, nonempty);
      }
      else {
        // we cannot add the new element
        ++elementsRemoved;
      }
      elementsDropped += elementsRemoved;
      return elementsRemoved;
    }
  };

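  // Illustrative sketch (not part of the original header): KeepNewest drops
  // the *oldest* elements to make room, and its ValueType is
  // std::pair<T, size_t>, so each dequeued item carries the number of
  // elements dropped since the previous successful dequeue. "Fragment" is a
  // hypothetical element type used only for illustration.
  //
  //   ConcurrentQueue<Fragment, KeepNewest<Fragment> > q(100);
  //   std::pair<Fragment, size_t> out;
  //   if (q.deqNowait(out)) {
  //     // out.first  : the fragment
  //     // out.second : elements dropped before this one was handed out
  //   }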

  template <class T>
  struct RejectNewest {
    typedef std::pair<T, size_t> ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;
    typedef SizeType ReturnType;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      detail::MemoryType itemSize = detail::memoryUsage(item);
      if (size < capacity && used + itemSize <= memory) {
        doInsert(item, elements, size, itemSize, used, nonempty);
        return 0;
      }
      ++elementsDropped;
      return 1;
    }
  };

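  // Illustrative sketch (not part of the original header): RejectNewest keeps
  // what is already queued and drops the incoming item when the queue is
  // full. doEnq() returns the number of elements dropped by that call (0 on
  // success, 1 on rejection), and the accumulated drop count is reported
  // through the pair ValueType on dequeue, as with KeepNewest. Names are
  // hypothetical, for illustration only.
  //
  //   ConcurrentQueue<Fragment, RejectNewest<Fragment> > q(100);
  //   size_t dropped = q.enqNowait(fragment);   // 0 if inserted, 1 if rejected
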
  template <class T, class EnqPolicy = FailIfFull<T> >
  class ConcurrentQueue {
  public:
    typedef typename EnqPolicy::ValueType ValueType;
    typedef typename EnqPolicy::SequenceType SequenceType;
    typedef typename SequenceType::size_type SizeType;

    explicit ConcurrentQueue
    (
      SizeType maxSize = std::numeric_limits<SizeType>::max(),
      detail::MemoryType maxMemory = std::numeric_limits<detail::MemoryType>::max()
    );

    ~ConcurrentQueue();

    typename EnqPolicy::ReturnType enqNowait(T const & item);

    void enqWait(T const & p);

    bool enqTimedWait(T const & p, seconds const &);

    bool deqNowait(ValueType &);

    void deqWait(ValueType &);

    bool deqTimedWait(ValueType &, seconds const &);

    bool empty() const;

    bool full() const;

    SizeType size() const;

    SizeType capacity() const;

    bool setCapacity(SizeType n);

    detail::MemoryType used() const;

    detail::MemoryType memory() const;

    bool setMemory(detail::MemoryType n);

    SizeType clear();

    void addExternallyDroppedEvents(SizeType);

    bool queueReaderIsReady() { return readerReady_; }

    void setReaderIsReady(bool rdy = true) {
      readyTime_ = std::chrono::steady_clock::now();
      readerReady_ = rdy;
    }

    std::chrono::steady_clock::time_point getReadyTime() { return readyTime_; }

  private:
    typedef std::lock_guard<std::mutex> LockType;
    typedef std::unique_lock<std::mutex> WaitLockType;

    mutable std::mutex protectElements_;
    mutable std::condition_variable queueNotEmpty_;
    mutable std::condition_variable queueNotFull_;

    std::chrono::steady_clock::time_point readyTime_;
    bool readerReady_;
    SequenceType elements_;
    SizeType capacity_;
    SizeType size_;
    /*
      N.B.: we rely on SizeType *not* being some synthesized large
      type, so that reading the value is an atomic action, as is
      incrementing or decrementing the value. We do *not* assume that
      there is any atomic getAndIncrement or getAndDecrement
      operation.
    */
    detail::MemoryType memory_;
    detail::MemoryType used_;
    size_t elementsDropped_;

    /*
      These private member functions assume that whatever locks
      necessary for safe operation have already been obtained.
    */

    /*
      Insert the given item into the list, if the queue is not already full,
      and increment size. Return true if the item is inserted, and
      false if not.
    */
    bool insertIfPossible(T const & item);

    /*
      Remove the object at the head of the queue, if there is one, and
      assign item the value of this object. The assignment may throw an
      exception; even if it does, the head will have been removed from
      the queue, and the size appropriately adjusted. Return true if
      the queue was nonempty, and false if the queue was empty.
    */
    bool removeHeadIfPossible(ValueType & item);

    /*
      Remove the object at the head of the queue, and assign item the
      value of this object. The assignment may throw an exception;
      even if it does, the head will have been removed from the queue,
      and the size appropriately adjusted. It is assumed the queue is
      nonempty.
    */
    void removeHead(ValueType & item);

    void assignItem(T & item, const T & element);
    void assignItem(std::pair<T, size_t> & item, const T & element);

    /*
      Return true if the queue cannot accept new entries.
    */
    bool isFull() const;

    /*
      These functions are declared private and not implemented to
      prevent their use.
    */
    ConcurrentQueue(ConcurrentQueue<T, EnqPolicy> const &);
    ConcurrentQueue & operator=(ConcurrentQueue<T, EnqPolicy> const &);
  };

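  // Illustrative sketch (not part of the original header): minimal
  // producer/consumer use of ConcurrentQueue with the default FailIfFull
  // policy. Names such as "workQueue", "Item", and "item" are hypothetical.
  //
  //   daqrate::ConcurrentQueue<Item> workQueue(1000);   // at most 1000 items
  //
  //   // producer thread
  //   workQueue.enqWait(item);                  // blocks while the queue is full
  //
  //   // consumer thread
  //   Item received;
  //   if (workQueue.deqTimedWait(received, daqrate::seconds(0.1))) {
  //     // got an item within ~100 ms
  //   }
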
  //------------------------------------------------------------------
  // Implementation follows
  //------------------------------------------------------------------

  template <class T, class EnqPolicy>
  ConcurrentQueue<T, EnqPolicy>::ConcurrentQueue
  (
    SizeType maxSize,
    detail::MemoryType maxMemory
  ) :
    protectElements_(),
    readyTime_(std::chrono::steady_clock::now()),
    readerReady_(false),
    elements_(),
    capacity_(maxSize),
    size_(0),
    memory_(maxMemory),
    used_(0),
    elementsDropped_(0)
  {}

  template <class T, class EnqPolicy>
  ConcurrentQueue<T, EnqPolicy>::~ConcurrentQueue()
  {
    LockType lock(protectElements_);
    elements_.clear();
    size_ = 0;
    used_ = 0;
    elementsDropped_ = 0;
  }

  // enqueue methods - 3 - enqNowait, enqWait, enqTimedWait

  template <class T, class EnqPolicy>
  typename EnqPolicy::ReturnType ConcurrentQueue<T, EnqPolicy>::enqNowait(T const & item)
  {
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqNowait enter size=%zu capacity=%zu used=%zu memory=%zu", size_, capacity_, used_, memory_ );
    LockType lock(protectElements_);
    auto retval = EnqPolicy::doEnq(item, elements_, size_, capacity_, used_, memory_,
                                   elementsDropped_, queueNotEmpty_);
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqNowait returning %zu", (SizeType)retval );
    return retval;
  }

  template <class T, class EnqPolicy>
  void ConcurrentQueue<T, EnqPolicy>::enqWait(T const & item)
  {
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqWait enter" );
    WaitLockType lock(protectElements_);
    while (isFull()) { queueNotFull_.wait(lock); }
    EnqPolicy::doInsert(item, elements_, size_,
                        detail::memoryUsage(item), used_, queueNotEmpty_);
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqWait returning" );
  }

  template <class T, class EnqPolicy>
  bool ConcurrentQueue<T, EnqPolicy>::enqTimedWait( T const & item, seconds const & waitTime )
  {
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqTimedWait enter with waitTime=%ld ms size=%zu capacity=%zu used=%zu memory=%zu"
         , std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count(), size_, capacity_, used_, memory_ );
    WaitLockType lock(protectElements_);
    if (isFull()) {
      queueNotFull_.wait_for(lock, waitTime);
    }
    bool retval = insertIfPossible(item);
    TRACE( 12, "ConcurrentQueue<T,EnqPolicy>::enqTimedWait returning %d", retval );
    return retval;
  }

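  // Illustrative sketch (not part of the original header): enqTimedWait()
  // waits on queueNotFull_ at most once, so a wakeup (including a spurious
  // one) while the queue is still full simply yields "false", and the failed
  // attempt is also counted in elementsDropped_. A caller that must not drop
  // data can retry; "queue" and "item" are hypothetical names.
  //
  //   while (!queue.enqTimedWait(item, daqrate::seconds(0.1))) {
  //     // still full after ~100 ms; decide whether to retry, drop, or log
  //   }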

  // dequeue methods - 3 - deqNowait, deqWait, deqTimedWait

  template <class T, class EnqPolicy>
  bool ConcurrentQueue<T, EnqPolicy>::deqNowait( ValueType & item )
  {
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqNowait enter" );
    LockType lock(protectElements_);
    bool retval = removeHeadIfPossible(item);
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqNowait returning %d", retval );
    return retval;
  }

  template <class T, class EnqPolicy>
  void ConcurrentQueue<T, EnqPolicy>::deqWait( ValueType & item )
  {
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqWait enter" );
    WaitLockType lock(protectElements_);
    while (size_ == 0) { queueNotEmpty_.wait(lock); }
    removeHead(item);
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqWait returning" );
  }

  template <class T, class EnqPolicy>
  bool ConcurrentQueue<T, EnqPolicy>::deqTimedWait( ValueType & item, seconds const & waitTime )
  {
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqTimedWait enter with waitTime=%ld ms size=%zu"
         , std::chrono::duration_cast<std::chrono::milliseconds>(waitTime).count(), size_ );
    WaitLockType lock(protectElements_);
    if (size_ == 0) {
      queueNotEmpty_.wait_for(lock, waitTime);
    }
    bool retval = removeHeadIfPossible(item);
    TRACE( 12, "ConcurrentQueue<T, EnqPolicy>::deqTimedWait returning %d size=%zu", retval, size_ );
    return retval;
  }

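  // Illustrative sketch (not part of the original header): a typical consumer
  // polls with deqTimedWait() so that it can also notice shutdown requests.
  // "queue", "Item", "process", and "keepRunning" are hypothetical names.
  //
  //   ConcurrentQueue<Item>::ValueType item;
  //   while (keepRunning) {
  //     if (queue.deqTimedWait(item, daqrate::seconds(0.25))) {
  //       process(item);
  //     }
  //   }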

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::empty() const
  {
    // No lock is necessary: the read is atomic.
    return size_ == 0;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::full() const
  {
    LockType lock(protectElements_);
    return isFull();
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::size() const
  {
    // No lock is necessary: the read is atomic.
    return size_;
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::capacity() const
  {
    // No lock is necessary: the read is atomic.
    return capacity_;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::setCapacity(SizeType newcapacity)
  {
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { capacity_ = newcapacity; }
    return isEmpty;
  }

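  // Illustrative sketch (not part of the original header): setCapacity() and
  // setMemory() only take effect while the queue is empty; both return false
  // (and change nothing) otherwise. "queue" is a hypothetical name.
  //
  //   if (!queue.setCapacity(500)) {
  //     queue.clear();            // discard (and count) anything still queued
  //     queue.setCapacity(500);
  //   }
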
  template <class T, class EnqPolicy>
  detail::MemoryType
  ConcurrentQueue<T, EnqPolicy>::used() const
  {
    // No lock is necessary: the read is atomic.
    return used_;
  }

  template <class T, class EnqPolicy>
  detail::MemoryType
  ConcurrentQueue<T, EnqPolicy>::memory() const
  {
    // No lock is necessary: the read is atomic.
    return memory_;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::setMemory(detail::MemoryType newmemory)
  {
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { memory_ = newmemory; }
    return isEmpty;
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::clear()
  {
    LockType lock(protectElements_);
    SizeType clearedEvents = size_;
    elementsDropped_ += size_;
    elements_.clear();
    size_ = 0;
    used_ = 0;
    return clearedEvents;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::addExternallyDroppedEvents(SizeType n)
  {
    LockType lock(protectElements_);
    elementsDropped_ += n;
  }

  //-----------------------------------------------------------
  // Private member functions
  //-----------------------------------------------------------

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::insertIfPossible(T const & item)
  {
    if (isFull()) {
      ++elementsDropped_;
      return false;
    }
    else {
      EnqPolicy::doInsert(item, elements_, size_,
                          detail::memoryUsage(item), used_, queueNotEmpty_);
      return true;
    }
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::removeHeadIfPossible(ValueType & item)
  {
    if (size_ == 0) { return false; }
    removeHead(item);
    return true;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::removeHead(ValueType & item)
  {
    SequenceType holder;
    // Move the item out of elements_ in a manner that will not throw.
    holder.splice(holder.begin(), elements_, elements_.begin());
    // Record the change in the length of elements_.
    --size_;
    queueNotFull_.notify_one();
    assignItem(item, holder.front());
    used_ -= detail::memoryUsage(item);
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::assignItem(T & item, const T & element)
  {
    item = element;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::assignItem(std::pair<T, size_t> & item, const T & element)
  {
    item.first = element;
    item.second = elementsDropped_;
    elementsDropped_ = 0;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::isFull() const
  {
    if (size_ >= capacity_ || used_ >= memory_) { return true; }
    return false;
  }

} // namespace daqrate

#endif /* artdaq_core_Core_ConcurrentQueue_hh */