artdaq_core  v1_06_01
ConcurrentQueue.hh
#ifndef artdaq_core_Core_ConcurrentQueue_hh
#define artdaq_core_Core_ConcurrentQueue_hh

#include <algorithm>
#include <cstddef>
#include <exception>
#include <limits>
#include <list>

#include <iostream> // debugging

#include <chrono>
#include <condition_variable>
#include <mutex>
#include <type_traits>

// #include <boost/date_time/posix_time/posix_time_types.hpp>
// #include <boost/utility/enable_if.hpp>
// #include <boost/thread/condition.hpp>
// #include <boost/thread/mutex.hpp>
// #include <boost/thread/xtime.hpp>
namespace daqrate {
  // We shall use daqrate::seconds as our "standard" duration
  // type. Note that this differs from std::chrono::seconds, which has
  // a representation in some integer type of at least 35 bits.
  //
  // daqrate::seconds dur(1.0) represents a duration of 1 second.
  // daqrate::seconds dur2(0.001) represents a duration of 1
  // millisecond.
  typedef std::chrono::duration<double> seconds;

  namespace detail {
    typedef size_t MemoryType;

    /*
      This template uses SFINAE to figure out whether the class used to
      instantiate the ConcurrentQueue template has a method memoryUsed
      returning the number of bytes occupied by the class itself.
    */
    template <typename T>
    class hasMemoryUsed {
      typedef char TrueType;
      struct FalseType { TrueType _[2]; };

      template <MemoryType(T:: *)() const>
      struct TestConst;

      template <typename C>
      static TrueType test(TestConst<&C::memoryUsed> *);
      template <typename C>
      static FalseType test(...);

    public:
      static const bool value = (sizeof(test<T>(0)) == sizeof(TrueType));
    };

    template <typename T>
    MemoryType
    memoryUsage(const std::pair<T, size_t> & t)
    {
      MemoryType usage(0UL);
      try {
        usage = t.first.memoryUsed();
      }
      catch (...)
      {}
      return usage;
    }

    template <typename T>
    typename std::enable_if<hasMemoryUsed<T>::value, MemoryType>::type
    memoryUsage(const T & t)
    {
      MemoryType usage(0UL);
      try {
        usage = t.memoryUsed();
      }
      catch (...)
      {}
      return usage;
    }

    template <typename T>
    typename std::enable_if < !hasMemoryUsed<T>::value, MemoryType >::type
    memoryUsage(const T & t)
    { return sizeof(t); }

  } // end namespace detail
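
  // Illustrative sketch (not part of the original header) of how the
  // memoryUsage() overloads dispatch. The type WithFootprint and its
  // 1024-byte payload are hypothetical.
  //
  //   struct WithFootprint {
  //     char payload[1024];
  //     detail::MemoryType memoryUsed() const { return sizeof(payload); }
  //   };
  //
  //   detail::memoryUsage(WithFootprint()); // hasMemoryUsed is true: calls
  //                                         // memoryUsed() and yields 1024
  //   detail::memoryUsage(42);              // int has no memoryUsed():
  //                                         // falls back to sizeof(int)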

  template <class T>
  struct FailIfFull {
    typedef void ReturnType;

    typedef T ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;

    static struct QueueIsFull : public std::exception {
      virtual const char * what() const throw() {
        return "Cannot add item to a full queue";
      }
    } queueIsFull;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      detail::MemoryType itemSize = detail::memoryUsage(item);
      if (size >= capacity || used + itemSize > memory) {
        ++elementsDropped;
        throw queueIsFull;
      }
      else {
        doInsert(item, elements, size, itemSize, used, nonempty);
      }
    }
  };

  template<typename T>
  typename FailIfFull<T>::QueueIsFull FailIfFull<T>::queueIsFull {};
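
  // Illustrative sketch (not part of the original header): with the
  // default FailIfFull policy, enqNowait() on a full queue throws
  // FailIfFull<T>::QueueIsFull. The element type int and capacity of 2
  // are hypothetical.
  //
  //   daqrate::ConcurrentQueue<int> q(2);   // FailIfFull<int> is the default policy
  //   q.enqNowait(1);
  //   q.enqNowait(2);
  //   try { q.enqNowait(3); }               // queue is full
  //   catch (daqrate::FailIfFull<int>::QueueIsFull const & e) {
  //     std::cerr << e.what() << '\n';      // "Cannot add item to a full queue"
  //   }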

  template <class T>
  struct KeepNewest {
    typedef std::pair<T, size_t> ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;
    typedef SizeType ReturnType;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      SizeType elementsRemoved(0);
      detail::MemoryType itemSize = detail::memoryUsage(item);
      while ((size == capacity || used + itemSize > memory) && !elements.empty()) {
        SequenceType holder;
        // Move the item out of elements in a manner that will not throw.
        holder.splice(holder.begin(), elements, elements.begin());
        // Record the change in the length of elements.
        --size;
        used -= detail::memoryUsage(holder.front());
        ++elementsRemoved;
      }
      if (size < capacity && used + itemSize <= memory)
        // we succeeded in making enough room for the new element
      {
        doInsert(item, elements, size, itemSize, used, nonempty);
      }
      else {
        // we cannot add the new element
        ++elementsRemoved;
      }
      elementsDropped += elementsRemoved;
      return elementsRemoved;
    }
  };
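
  // Illustrative sketch (not part of the original header): with the
  // KeepNewest policy the oldest elements are discarded to make room,
  // and each dequeued ValueType is a std::pair whose second member
  // reports the number of elements dropped since the previous dequeue.
  // The element type int and capacity of 1 are hypothetical.
  //
  //   daqrate::ConcurrentQueue<int, daqrate::KeepNewest<int>> q(1);
  //   q.enqNowait(1);
  //   q.enqNowait(2);                 // drops 1, keeps 2; returns the drop count (1)
  //   std::pair<int, size_t> out;
  //   q.deqNowait(out);               // out.first == 2, out.second == 1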

  template <class T>
  struct RejectNewest {
    typedef std::pair<T, size_t> ValueType;
    typedef std::list<T> SequenceType;
    typedef typename SequenceType::size_type SizeType;
    typedef SizeType ReturnType;

    static void doInsert
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      detail::MemoryType const & itemSize,
      detail::MemoryType & used,
      std::condition_variable & nonempty
    ) {
      elements.push_back(item);
      ++size;
      used += itemSize;
      nonempty.notify_one();
    }

    static ReturnType doEnq
    (
      T const & item,
      SequenceType & elements,
      SizeType & size,
      SizeType & capacity,
      detail::MemoryType & used,
      detail::MemoryType & memory,
      size_t & elementsDropped,
      std::condition_variable & nonempty
    ) {
      detail::MemoryType itemSize = detail::memoryUsage(item);
      if (size < capacity && used + itemSize <= memory) {
        doInsert(item, elements, size, itemSize, used, nonempty);
        return 0;
      }
      ++elementsDropped;
      return 1;
    }
  };
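
  // Illustrative sketch (not part of the original header): with the
  // RejectNewest policy an element offered to a full queue is simply
  // dropped and counted; the queued elements are untouched. The element
  // type int and capacity of 1 are hypothetical.
  //
  //   daqrate::ConcurrentQueue<int, daqrate::RejectNewest<int>> q(1);
  //   q.enqNowait(1);
  //   q.enqNowait(2);                 // rejected; returns 1 (one element dropped)
  //   std::pair<int, size_t> out;
  //   q.deqNowait(out);               // out.first == 1, out.second == 1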

  /*
    ConcurrentQueue<T, EnqPolicy> provides a bounded, thread-safe queue
    of T. The EnqPolicy parameter (FailIfFull, KeepNewest, or
    RejectNewest) determines what happens when an element is offered to
    a queue that is already full.
  */
  template <class T, class EnqPolicy = FailIfFull<T> >
  class ConcurrentQueue {
  public:
    typedef typename EnqPolicy::ValueType ValueType;
    typedef typename EnqPolicy::SequenceType SequenceType;
    typedef typename SequenceType::size_type SizeType;

    /*
      The queue is bounded both by a maximum number of elements
      (maxSize) and by a maximum amount of memory (maxMemory), as
      measured by detail::memoryUsage.
    */
    explicit ConcurrentQueue
    (
      SizeType maxSize = std::numeric_limits<SizeType>::max(),
      detail::MemoryType maxMemory = std::numeric_limits<detail::MemoryType>::max()
    );

    ~ConcurrentQueue();

    // Copy the given item onto the queue without waiting; behavior on a
    // full queue is determined by EnqPolicy (throw, drop oldest, or
    // reject the new item).
    typename EnqPolicy::ReturnType enqNowait(T const & item);

    // Copy the given item onto the queue, waiting until the queue is
    // not full.
    void enqWait(T const & p);

    // Copy the given item onto the queue, waiting at most the given
    // time; return true if the item was inserted.
    bool enqTimedWait(T const & p, seconds const &);

    // Remove the head of the queue into the given item without
    // waiting; return true if an element was dequeued.
    bool deqNowait(ValueType &);

    // Remove the head of the queue into the given item, waiting until
    // the queue is nonempty.
    void deqWait(ValueType &);

    // Remove the head of the queue into the given item, waiting at
    // most the given time; return true if an element was dequeued.
    bool deqTimedWait(ValueType &, seconds const &);

    bool empty() const;
    bool full() const;

    // Element count and element-count bound.
    SizeType size() const;
    SizeType capacity() const;

    // Reset the element-count bound; succeeds only if the queue is empty.
    bool setCapacity(SizeType n);

    // Bytes used by queued elements and the memory bound.
    detail::MemoryType used() const;
    detail::MemoryType memory() const;

    // Reset the memory bound; succeeds only if the queue is empty.
    bool setMemory(detail::MemoryType n);

    // Remove all elements, counting them as dropped; return the number removed.
    SizeType clear();

    // Add the given count to the number of dropped elements.
    void addExternallyDroppedEvents(SizeType);

    bool queueReaderIsReady() { return readerReady_; }

    void setReaderIsReady(bool rdy = true) {
      readyTime_ = std::chrono::steady_clock::now();
      readerReady_ = rdy;
    }

    std::chrono::steady_clock::time_point getReadyTime() { return readyTime_; }

  private:
    typedef std::lock_guard<std::mutex> LockType;
    typedef std::unique_lock<std::mutex> WaitLockType;

    mutable std::mutex protectElements_;
    mutable std::condition_variable queueNotEmpty_;
    mutable std::condition_variable queueNotFull_;

    std::chrono::steady_clock::time_point readyTime_;
    bool readerReady_;
    SequenceType elements_;
    SizeType capacity_;
    SizeType size_;
    /*
      N.B.: we rely on SizeType *not* being some synthesized large
      type, so that reading the value is an atomic action, as is
      incrementing or decrementing the value. We do *not* assume that
      there is any atomic getAndIncrement or getAndDecrement
      operation.
    */
    detail::MemoryType memory_;
    detail::MemoryType used_;
    size_t elementsDropped_;

    /*
      These private member functions assume that whatever locks
      necessary for safe operation have already been obtained.
    */

    /*
      Insert the given item into the list, if it is not already full,
      and increment size. Return true if the item is inserted, and
      false if not.
    */
    bool insertIfPossible(T const & item);

    /*
      Remove the object at the head of the queue, if there is one, and
      assign item the value of this object. The assignment may throw an
      exception; even if it does, the head will have been removed from
      the queue, and the size appropriately adjusted. Return true if
      the queue was nonempty, and false if the queue was empty.
    */
    bool removeHeadIfPossible(ValueType & item);

    /*
      Remove the object at the head of the queue, and assign item the
      value of this object. The assignment may throw an exception;
      even if it does, the head will have been removed from the queue,
      and the size appropriately adjusted. It is assumed the queue is
      nonempty.
    */
    void removeHead(ValueType & item);

    void assignItem(T & item, const T & element);
    void assignItem(std::pair<T, size_t> & item, const T & element);

    /*
      Return true if the queue cannot accept new entries.
    */
    bool isFull() const;

    /*
      These functions are declared private and not implemented to
      prevent their use.
    */
    ConcurrentQueue(ConcurrentQueue<T, EnqPolicy> const &);
    ConcurrentQueue & operator=(ConcurrentQueue<T, EnqPolicy> const &);
  };
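
  /*
    Illustrative sketch (not part of the original header): a minimal
    producer/consumer pairing. The element type int, the capacity of
    100, and the 0.1-second timeout are hypothetical.

      daqrate::ConcurrentQueue<int> q(100);

      // producer thread
      q.enqWait(42);                                  // blocks while the queue is full

      // consumer thread
      daqrate::ConcurrentQueue<int>::ValueType v;
      if (q.deqTimedWait(v, daqrate::seconds(0.1))) {
        // v now holds the dequeued element
      }
  */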

  //------------------------------------------------------------------
  // Implementation follows
  //------------------------------------------------------------------

  template <class T, class EnqPolicy>
  ConcurrentQueue<T, EnqPolicy>::ConcurrentQueue
  (
    SizeType maxSize,
    detail::MemoryType maxMemory
  ) :
    protectElements_(),
    readyTime_(std::chrono::steady_clock::now()),
    readerReady_(false),
    elements_(),
    capacity_(maxSize),
    size_(0),
    memory_(maxMemory),
    used_(0),
    elementsDropped_(0)
  {}

  template <class T, class EnqPolicy>
  ConcurrentQueue<T, EnqPolicy>::~ConcurrentQueue()
  {
    LockType lock(protectElements_);
    elements_.clear();
    size_ = 0;
    used_ = 0;
    elementsDropped_ = 0;
  }

  template <class T, class EnqPolicy>
  typename EnqPolicy::ReturnType
  ConcurrentQueue<T, EnqPolicy>::enqNowait(T const & item)
  {
    LockType lock(protectElements_);
    return EnqPolicy::doEnq
           (item, elements_, size_, capacity_, used_, memory_,
            elementsDropped_, queueNotEmpty_);
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::enqWait(T const & item)
  {
    WaitLockType lock(protectElements_);
    while (isFull()) { queueNotFull_.wait(lock); }
    EnqPolicy::doInsert(item, elements_, size_,
                        detail::memoryUsage(item), used_, queueNotEmpty_);
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::enqTimedWait
  (
    T const & item,
    seconds const & waitTime
  )
  {
    WaitLockType lock(protectElements_);
    if (isFull()) {
      queueNotFull_.wait_for(lock, waitTime);
    }
    return insertIfPossible(item);
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::deqNowait(ValueType & item)
  {
    LockType lock(protectElements_);
    return removeHeadIfPossible(item);
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::deqWait(ValueType & item)
  {
    WaitLockType lock(protectElements_);
    while (size_ == 0) { queueNotEmpty_.wait(lock); }
    removeHead(item);
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::deqTimedWait
  (
    ValueType & item,
    seconds const & waitTime
  )
  {
    WaitLockType lock(protectElements_);
    if (size_ == 0) {
      queueNotEmpty_.wait_for(lock, waitTime);
    }
    return removeHeadIfPossible(item);
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::empty() const
  {
    // No lock is necessary: the read is atomic.
    return size_ == 0;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::full() const
  {
    LockType lock(protectElements_);
    return isFull();
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::size() const
  {
    // No lock is necessary: the read is atomic.
    return size_;
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::capacity() const
  {
    // No lock is necessary: the read is atomic.
    return capacity_;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::setCapacity(SizeType newcapacity)
  {
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { capacity_ = newcapacity; }
    return isEmpty;
  }

  template <class T, class EnqPolicy>
  detail::MemoryType
  ConcurrentQueue<T, EnqPolicy>::used() const
  {
    // No lock is necessary: the read is atomic.
    return used_;
  }

  template <class T, class EnqPolicy>
  detail::MemoryType
  ConcurrentQueue<T, EnqPolicy>::memory() const
  {
    // No lock is necessary: the read is atomic.
    return memory_;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::setMemory(detail::MemoryType newmemory)
  {
    LockType lock(protectElements_);
    bool isEmpty = (size_ == 0);
    if (isEmpty) { memory_ = newmemory; }
    return isEmpty;
  }

  template <class T, class EnqPolicy>
  typename ConcurrentQueue<T, EnqPolicy>::SizeType
  ConcurrentQueue<T, EnqPolicy>::clear()
  {
    LockType lock(protectElements_);
    SizeType clearedEvents = size_;
    elementsDropped_ += size_;
    elements_.clear();
    size_ = 0;
    used_ = 0;
    return clearedEvents;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::addExternallyDroppedEvents(SizeType n)
  {
    LockType lock(protectElements_);
    elementsDropped_ += n;
  }

  //-----------------------------------------------------------
  // Private member functions
  //-----------------------------------------------------------

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::insertIfPossible(T const & item)
  {
    if (isFull()) {
      ++elementsDropped_;
      return false;
    }
    else {
      EnqPolicy::doInsert(item, elements_, size_,
                          detail::memoryUsage(item), used_, queueNotEmpty_);
      return true;
    }
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::removeHeadIfPossible(ValueType & item)
  {
    if (size_ == 0) { return false; }
    removeHead(item);
    return true;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::removeHead(ValueType & item)
  {
    SequenceType holder;
    // Move the item out of elements_ in a manner that will not throw.
    holder.splice(holder.begin(), elements_, elements_.begin());
    // Record the change in the length of elements_.
    --size_;
    queueNotFull_.notify_one();
    assignItem(item, holder.front());
    used_ -= detail::memoryUsage(item);
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::assignItem(T & item, const T & element)
  {
    item = element;
  }

  template <class T, class EnqPolicy>
  void
  ConcurrentQueue<T, EnqPolicy>::assignItem(std::pair<T, size_t> & item, const T & element)
  {
    item.first = element;
    item.second = elementsDropped_;
    elementsDropped_ = 0;
  }

  template <class T, class EnqPolicy>
  bool
  ConcurrentQueue<T, EnqPolicy>::isFull() const
  {
    if (size_ >= capacity_ || used_ >= memory_) { return true; }
    return false;
  }

} // namespace daqrate

#endif /* artdaq_core_Core_ConcurrentQueue_hh */