libjmmcg  release_579_6_g8cffd
A C++ library containing an eclectic mix of useful, advanced components.
intrusive.hpp
Go to the documentation of this file.
1 #ifndef LIBJMMCG_CORE_INTRUSIVE_HPP
2 #define LIBJMMCG_CORE_INTRUSIVE_HPP
3 
4 /******************************************************************************
5 ** Copyright © 2011 by J.M.McGuiness, coder@hussar.me.uk
6 **
7 ** This library is free software; you can redistribute it and/or
8 ** modify it under the terms of the GNU Lesser General Public
9 ** License as published by the Free Software Foundation; either
10 ** version 2.1 of the License, or (at your option) any later version.
11 **
12 ** This library is distributed in the hope that it will be useful,
13 ** but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 ** Lesser General Public License for more details.
16 **
17 ** You should have received a copy of the GNU Lesser General Public
18 ** License along with this library; if not, write to the Free Software
19 ** Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21 
22 #include "shared_ptr.hpp"
23 
24 #include <algorithm>
25 #include <iterator>
26 
27 namespace jmmcg { namespace LIBJMMCG_VER_NAMESPACE { namespace intrusive {
28 
29  namespace private_ {
30  template<class, class> class slist_iterator_internal;
31  }
32 
33  template<class, class> class stack;
34  template<class, class> class slist;
35 
36  /// Singly-linked intrusive node details. Inherit from this to provide the linkage structure and object-lifetime semantics of the list, which are specifically shared_ptr semantics.
37  /**
38  The use of the shared_ptr is very important: it solves the ABA problem that this implementation would otherwise suffer from.
39  */
template<
	class LkT	///< The lock_traits supplying the (potentially atomic) pointer type used for the linkage.
>
class node_details_itf : virtual public sp_counter_itf_type<long> {
public:
	using base_t=sp_counter_itf_type<long>;
	using lock_traits=LkT;
	using atomic_ctr_t=base_t;
	/// The pointer type that links nodes together; atomicity (if any) is chosen by lock_traits.
	using atomic_ptr_t=typename lock_traits::template atomic<node_details_itf *>;

	/// The link to the next node in the singly-linked chain; default-constructed (unlinked) until inserted.
	atomic_ptr_t next;

	/// Virtual dtor: nodes are deleted through this interface by the shared_ptr machinery.
	virtual ~node_details_itf() noexcept(true) FORCE_INLINE {}

	/// Debugging aid: derived classes may report their state; this base reports nothing.
	virtual tstring
	to_string() const noexcept(false) FORCE_INLINE {
		return "";
	}

protected:
	/// Only derived classes may construct; a fresh node starts unlinked (next is empty).
	node_details_itf() noexcept(true) FORCE_INLINE
	: next() {}
};
63 
64  namespace private_ {
65 
66  /// Hold the first node in the slist.
template<
	class LkT	///< The lock_traits forwarded to node_details_itf and the reference-counter base.
>
class node_details final : public node_details_itf<LkT>, public sp_counter_type<typename node_details_itf<LkT>::atomic_ctr_t::value_type, typename node_details_itf<LkT>::lock_traits> {
public:
	using base_t=node_details_itf<LkT>;
	using base2_t=sp_counter_type<typename base_t::atomic_ctr_t::value_type, typename base_t::lock_traits>;
	using atomic_ctr_t=typename base2_t::atomic_ctr_t;
	using lock_traits=typename base_t::lock_traits;

	constexpr node_details() noexcept(true) FORCE_INLINE
	: base_t() {}
	~node_details() noexcept(true)=default;
	// NOTE(review): assignment is deleted but copy/move construction is not — presumably
	// intentional (the containers copy-construct the pre-front node on move); confirm.
	void operator=(node_details const &)=delete;
	void operator=(node_details &&)=delete;

	/// Debugging aid: reports the reference-counter state from the sp_counter_type base.
	tstring
	to_string() const noexcept(false) FORCE_INLINE override {
		return base2_t::sp_to_string();
	}
};
88 
89  template<class V, class LkT> struct end;
90 
91  /**
92  If you don't like the cut-down interface of this container, blame YAGNI & TDD.
93  */
template<class V, class LkT>
class slist_iterator_internal {
public:
	using lock_traits=LkT;
	typedef std::forward_iterator_tag iterator_category;
	typedef std::ptrdiff_t difference_type;
	/// Elements are exposed as shared_ptrs: the shared ownership is what defuses the ABA problem (see node_details_itf).
	typedef shared_ptr<V, lock_traits> value_type;
	typedef shared_ptr<V, lock_traits> pointer;
	typedef const shared_ptr<V, lock_traits> const_pointer;
	typedef pointer & reference;
	// NOTE(review): a const_reference typedef is used by the converting ctor below but is not
	// visible in this extraction — it appears to have been elided; confirm against the original header.

	typedef typename value_type::deleter_t deleter_t;
	typedef typename value_type::ctr_type ctr_type;

private:
	/// Internally the links are held type-erased, via the node base-class, not via V itself.
	using node_ptr_t=shared_ptr<node_details_itf<lock_traits>, lock_traits>;

	BOOST_MPL_ASSERT((std::is_base_of<typename node_ptr_t::value_type, typename value_type::value_type>));

public:
	/// Construct from a node; real_node is derived as that node's successor (or kept empty if n is empty).
	explicit __stdcall slist_iterator_internal(const_reference n) noexcept(true) FORCE_INLINE
	: node(n),
	real_node(node.get().get() ? node->next : node.get()) {
		assert(node.get().get() ? node->next==real_node.get() : true);
	}
	__stdcall slist_iterator_internal(slist_iterator_internal const &n) noexcept(true) FORCE_INLINE
	: node(n.node),
	// Re-derive real_node from the copied node rather than copying n.real_node, so the
	// copy observes the node's current next-link at the time of the copy.
	real_node(node.get().get() ? node->next : node.get()) {
	}
	__stdcall slist_iterator_internal & operator=(slist_iterator_internal const &n) noexcept(true) FORCE_INLINE {
		node=n.node;
		real_node=n.real_node;
		return *this;
	}

	/// Equal when both are empty, or when both nodes' next-links compare equal.
	bool __fastcall operator==(slist_iterator_internal const &n) const noexcept(true) FORCE_INLINE {
		if (node.get().get() && n.node.get().get()) {
			return node->next==n.node->next;
		} else if (!node.get() && !n.node.get()) {
			return true;
		} else if (node.get().get() && !n.node.get()) {
			// NOTE(review): compares against n.node.get() here but n.node.get().get() in the
			// end<>-overload below — presumably equivalent via the pointer-wrapper's comparison; verify.
			return node->next==n.node.get();
		} else {
			return false;
		}
	}
	bool __fastcall operator!=(slist_iterator_internal const &n) const noexcept(true) FORCE_INLINE {
		return !operator==(n);
	}
	/// Comparison against the end-sentinel; defined out-of-line, after end<> is complete.
	bool __fastcall operator==(end<V, LkT> const &n) const noexcept(true) FORCE_INLINE;

	/// Advance: node becomes the old real_node; real_node becomes its successor (empty once past the end).
	slist_iterator_internal &__fastcall operator++() noexcept(true) FORCE_INLINE {
		const typename node_ptr_t::atomic_ptr_t next_real_node(real_node.get().get() ? real_node->next : typename node_ptr_t::atomic_ptr_t());
		node=real_node;
		real_node=node_ptr_t(next_real_node);
		assert(node.get().get() ? node->next==real_node.get() : true);
		return *this;
	}
	[[nodiscard]] slist_iterator_internal __fastcall operator++(int) noexcept(true) FORCE_INLINE {
		const slist_iterator_internal tmp(*this);
		++*this;
		return tmp;
	}
	/// Dereference yields a shared_ptr to the element, converted from the stored node-base pointer.
	pointer __fastcall operator*() noexcept(true) FORCE_INLINE {
		return pointer(real_node);
	}
	const_pointer __fastcall operator*() const noexcept(true) FORCE_INLINE {
		return const_pointer(real_node);
	}
	pointer __fastcall operator->() noexcept(true) FORCE_INLINE {
		return pointer(real_node);
	}
	const_pointer __fastcall operator->() const noexcept(true) FORCE_INLINE {
		return const_pointer(real_node);
	}

	/// Debugging aid: stream both held links.
	friend tostream &
	operator<<(tostream &os, slist_iterator_internal const &i) {
		os<<"node="<<i.node
		<<", real_node="<<i.real_node;
		return os;
	}

private:
	friend class stack<V, LkT>;
	friend class slist<V, LkT>;

	/**
		Note how, like slist::penultimate_, node is one-before-the-node-we-want, which allows us to slist::erase() in constant time.
	*/
	node_ptr_t node;
	/// The node actually dereferenced: the successor of node; empty when past the end.
	node_ptr_t real_node;
};
/// Const-iterator partial specialization: dereference yields only const_pointer, and no end<>-comparison or copy-assignment is provided.
template<class V, class LkT>
class slist_iterator_internal<V const, LkT> {
public:
	using lock_traits=LkT;
	typedef std::forward_iterator_tag iterator_category;
	typedef std::ptrdiff_t difference_type;
	typedef shared_ptr<V, lock_traits> value_type;
	typedef shared_ptr<V, lock_traits> pointer;
	typedef const shared_ptr<V, lock_traits> const_pointer;
	typedef pointer & reference;
	// NOTE(review): a const_reference typedef is used by the first ctor below but is not
	// visible in this extraction — it appears to have been elided; confirm against the original header.

	typedef typename value_type::deleter_t deleter_t;
	typedef typename value_type::ctr_type ctr_type;

private:
	/// Links are held type-erased via the node base-class, as in the mutable iterator.
	using node_ptr_t=shared_ptr<node_details_itf<lock_traits>, lock_traits>;

	BOOST_MPL_ASSERT((std::is_base_of<typename node_ptr_t::value_type, typename value_type::value_type>));

public:
	/// Construct from an element pointer; real_node is the node's successor (or empty).
	explicit __stdcall slist_iterator_internal(const_reference n) noexcept(true) FORCE_INLINE
	: node(n), real_node(node.get().get() ? node->next : node.get()) {
		// NOTE(review): this assert uses real_node.get().get(), the sibling ctor below uses
		// real_node.get() — presumably equivalent via the wrapper's comparison; verify.
		assert(node.get().get() ? node->next==real_node.get().get() : true);
	}
	/// Construct directly from a type-erased node pointer.
	explicit __stdcall slist_iterator_internal(node_ptr_t n) noexcept(true) FORCE_INLINE
	: node(n), real_node(node.get().get() ? node->next : node.get()) {
		assert(node.get().get() ? node->next==real_node.get() : true);
	}
	__stdcall slist_iterator_internal(slist_iterator_internal const &n) noexcept(true) FORCE_INLINE
	: node(n.node), real_node(node.get().get() ? node->next : node.get()) {
	}

	/// Equal when both are empty, or when both nodes' next-links compare equal.
	bool __fastcall operator==(slist_iterator_internal const &n) const noexcept(true) FORCE_INLINE {
		if (node.get().get() && n.node.get().get()) {
			return node->next==n.node->next;
		} else if (!node.get() && !n.node.get()) {
			return true;
		} else if (node.get().get() && !n.node.get()) {
			return node->next==n.node.get();
		} else {
			return false;
		}
	}
	bool __fastcall operator!=(slist_iterator_internal const &n) const noexcept(true) FORCE_INLINE {
		return !operator==(n);
	}

	/// Advance: node becomes the old real_node; real_node becomes its successor (empty once past the end).
	slist_iterator_internal &__fastcall operator++() noexcept(true) FORCE_INLINE {
		const typename node_ptr_t::atomic_ptr_t next_real_node(real_node.get().get() ? real_node->next : typename node_ptr_t::atomic_ptr_t());
		node=real_node;
		real_node=node_ptr_t(next_real_node);
		assert(node.get().get() ? node->next==real_node.get() : true);
		return *this;
	}
	[[nodiscard]] slist_iterator_internal __fastcall operator++(int) noexcept(true) FORCE_INLINE {
		const slist_iterator_internal tmp(*this);
		++*this;
		return tmp;
	}
	const_pointer __fastcall operator*() const noexcept(true) FORCE_INLINE {
		return const_pointer(real_node);
	}
	const_pointer __fastcall operator->() const noexcept(true) FORCE_INLINE {
		return const_pointer(real_node);
	}

	/// Debugging aid: stream both held links.
	friend tostream &
	operator<<(tostream &os, slist_iterator_internal const &i) {
		os<<"node="<<i.node
		<<", real_node="<<i.real_node;
		return os;
	}

private:
	friend class stack<V, LkT>;
	friend class slist<V, LkT>;

	/**
		Note how, like slist::penultimate_, node is one-before-the-node-we-want, which allows us to slist::erase() in constant time.
	*/
	node_ptr_t node;
	node_ptr_t real_node;
};
274 
275  template<class V, class LkT>
276  struct end final : public slist_iterator_internal<V, LkT> {
277  typedef slist_iterator_internal<V, LkT> base_t;
278  typedef typename base_t::value_type value_type;
279 
280  constexpr __stdcall end() FORCE_INLINE
281  : base_t(value_type()) {
282  }
283  constexpr bool __fastcall operator==(end const &) const noexcept(true) FORCE_INLINE {
284  return true;
285  }
286  constexpr bool __fastcall operator!=(end const &n) const noexcept(true) FORCE_INLINE {
287  return !operator==(n);
288  }
289  };
290 
291  template<class V, class LkT> inline bool FORCE_INLINE
292  slist_iterator_internal<V, LkT>::operator==(end<V, LkT> const &n) const noexcept(true) {
293  return node.get().get() ? node->next==n.node.get().get() : !node.get();
294  }
295 
296  }
297 
298  /// A "good enough for PPD" implementation of a singly-linked, hybrid, intrusive, pointer-based stack.
299  /**
300  When inserting a node, no memory allocations are required, which is good in a multi-threading environment as it reduces calls to any memory allocator.
301  If you don't like the cut-down interface of this container, blame YAGNI & TDD.
302 
303  \see boost::stack, std::stack
304  */
template<
	class V,	///< The type of the contained objects, which must be intrusive, and publicly inherit from node_details.
	class LkT	///< The lock_traits that providing the (potentially) atomic operations to be used upon pointers to the value_type.
>
class stack {
protected:
	using node_details_t=private_::node_details<LkT>;
	using atomic_ptr_t=typename node_details_t::base_t::atomic_ptr_t;

public:
	using lock_traits=typename node_details_t::lock_traits;
	typedef std::size_t size_type;
	/// Elements are held and returned as shared_ptrs; the shared ownership defuses the ABA problem.
	typedef shared_ptr<V, LkT> value_type;
	typedef typename lock_traits::template atomic_counter_type<unsigned long> size_ctr_t;
	typedef private_::slist_iterator_internal<V, LkT> iterator;
	typedef private_::slist_iterator_internal<V const, LkT> const_iterator;
	typedef typename iterator::pointer pointer;
	typedef typename iterator::reference reference;
	// NOTE(review): const_pointer/const_reference typedefs are used below (e.g. by
	// erase(const_reference)) but are not visible in this extraction — they appear to
	// have been elided; confirm against the original header.

	typedef typename value_type::deleter_t deleter_t;
	typedef typename value_type::ctr_type ctr_type;
	/**
		To assist in allowing compile-time computation of the algorithmic order of the threading model.
	*/
	// NOTE(review): the following declaration is truncated in this extraction — it looks
	// like a static constexpr member derived from atomic_ptr_t::memory_access_mode;
	// recover the full declaration from version control before editing.
	atomic_ptr_t::memory_access_mode==ppd::generic_traits::memory_access_modes::crew_memory_access
	);

	// The contained type must derive from the intrusive node interface.
	BOOST_MPL_ASSERT((std::is_base_of<typename node_details_t::base_t, typename value_type::value_type>));

	__stdcall stack() noexcept(true) FORCE_INLINE;
	stack(stack const &)=delete;
	stack(stack &&) noexcept(true) FORCE_INLINE;
	/**
		Destroys every remaining element.
		Algorithmic complexity: O(n)
	*/
	virtual __stdcall ~stack() noexcept(true) FORCE_INLINE;

	/**
		Algorithmic complexity: O(1)
	*/
	iterator __fastcall begin() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	const_iterator __fastcall begin() const noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	iterator __fastcall end() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	const_iterator __fastcall end() const noexcept(true) FORCE_INLINE;
	/// Return true if the container is empty, false otherwise.
	/**
		Algorithmic complexity: O(1)

		\return True if the container is empty, false otherwise.
	*/
	bool __fastcall empty() const noexcept(true) FORCE_INLINE;
	/// Atomically return the number of elements in the container.
	/**
		Algorithmic complexity: O(1)

		\return The number of elements in the container.
	*/
	size_type __fastcall size() const noexcept(true) FORCE_INLINE;
	/// Non-atomically return the number of elements in the container.
	/**
		Algorithmic complexity: O(n)
		Mainly used for verifying list integrity in testing. Not much use for anything else.

		\return The number of elements in the container.
	*/
	size_type __fastcall size_n() const noexcept(true) FORCE_INLINE;
	/// Remove the element from the container.
	/**
		Algorithmic complexity: O(1)

		\param v Iterator to the element to be removed.
	*/
	void __fastcall erase(iterator v) noexcept(true) FORCE_INLINE;
	/// Non-atomically remove the element from the container.
	/**
		Algorithmic complexity: O(n).

		\param v The value equivalent to the element to be removed.
		\return The number of elements erased from the list, at most 1.
	*/
	size_type __fastcall erase(const_reference v) noexcept(true) FORCE_INLINE;
	/// Remove all of the elements from the container.
	/**
		Calls the dtor for each element within the container.
		Algorithmic complexity: O(n)
	*/
	void __fastcall clear() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\return The top of the stack.
	*/
	value_type __fastcall top() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\return The top of the stack.
	*/
	value_type __fastcall top() const noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push(value_type const &v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push(value_type &&v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_front(value_type const &v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_front(value_type &&v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	void __fastcall pop() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	void __fastcall pop_front() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
		This method assumes that only one thread is popping at a time, and that the container is not empty.
	*/
	value_type __fastcall pop_top_nochk() noexcept(true) FORCE_INLINE;

protected:
	/// The dummy pre-first node: begin() is always prefront_.next, enabling O(1) erase of the head.
	mutable node_details_t prefront_;
	// NOTE(review): a member (presumably the size_ctr_t element counter used by size())
	// appears to have been elided from this extraction here; confirm against the original header.

	/// Atomically detach and return the node following the given link.
	typename node_details_t::base_t::atomic_ptr_t
	unlink_node(typename node_details_t::base_t::atomic_ptr_t &node) noexcept(true) FORCE_INLINE;

	/**
		Algorithmic complexity: O(1)

		\param i The iterator immediately after which the element should be added.
		\param v The element to be added.

		\see push()
	*/
	static void insert(typename node_details_t::base_t::atomic_ptr_t i, value_type v) noexcept(true) FORCE_INLINE;
	void insert(value_type v) noexcept(true) FORCE_INLINE;
};
477 
478  /// A "good enough for PPD" implementation of a singly-linked, hybrid, intrusive, pointer-based list.
479  /**
480  When inserting a node, no memory allocations are required, which is good in a multi-threading environment as it reduces calls to any memory allocator.
481  If you don't like the cut-down interface of this container, blame YAGNI & TDD.
482 
483  \see boost::instrusive::slist, boost::slist, std::list
484  */
template<
	class V,	///< The type of the contained objects, which must be intrusive, and publicly inherit from node_details.
	class LkT	///< The lock_traits that providing the (potentially) atomic operations to be used upon pointers to the value_type.
>
class slist : public stack<V, LkT> {
public:
	typedef stack<V, LkT> base_t;
	typedef typename base_t::value_type value_type;
	typedef typename base_t::lock_traits lock_traits;
	typedef typename base_t::size_ctr_t size_ctr_t;
	typedef typename base_t::size_type size_type;
	typedef typename base_t::iterator iterator;
	// NOTE(review): const_iterator (used by the const find() overload below) and
	// const_reference typedefs are not visible in this extraction — they appear to
	// have been elided; confirm against the original header.
	typedef typename base_t::pointer pointer;
	typedef typename base_t::reference reference;
	typedef typename base_t::const_pointer const_pointer;

	typedef typename base_t::deleter_t deleter_t;
	typedef typename base_t::ctr_type ctr_type;

	// Make the base-class erase(iterator) overload visible alongside erase(const_reference) below.
	using base_t::erase;

	__stdcall slist() noexcept(true) FORCE_INLINE;
	slist(slist &&) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(n)
	*/
	__stdcall ~slist() noexcept(true) FORCE_INLINE;

	/// Find if the element is within the container.
	/**
		Algorithmic complexity: O(n).

		\param v The element to be found.
		\return The iterator to the element within the container, or end() otherwise.
	*/
	iterator __fastcall find(const_reference v) noexcept(true) FORCE_INLINE;
	/// Find if the element is within the container.
	/**
		Algorithmic complexity: O(n).

		\param v The element to be found.
		\return The iterator to the element within the container, or end() otherwise.
	*/
	const_iterator __fastcall find(const_reference v) const noexcept(true) FORCE_INLINE;
	/// Remove the element from the container.
	/**
		Algorithmic complexity: O(n).

		\param v The value equivalent to the element to be removed.
		\return The number of elements erased from the list, at most 1.
	*/
	size_type __fastcall erase(const_reference v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	value_type __fastcall front() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\return The front of the list.
	*/
	value_type __fastcall front() const noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	value_type __fastcall back() noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	value_type __fastcall back() const noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_front(value_type const &v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_front(value_type &&v) noexcept(true) FORCE_INLINE;
	/**
		Algorithmic complexity: O(1)
	*/
	void __fastcall pop_front() noexcept(true) FORCE_INLINE;
	/**
		Note that this is NOT atomic! So NOT thread-safe.
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_back(value_type const &v) noexcept(true) FORCE_INLINE;
	/**
		Note that this is NOT atomic! So NOT thread-safe.
		Algorithmic complexity: O(1)

		\param v The element to be added.
	*/
	void __fastcall push_back(value_type &&v) noexcept(true) FORCE_INLINE;

	/**
		Integrity check used by tests: walks the chain from the pre-front node.
		Algorithmic complexity: O(n)

		\return True if the internal iterators are consistent.
	*/
	bool penultimate_reachable_from_prefront() const noexcept(true) FORCE_INLINE;

private:
	typedef typename base_t::node_details_t node_details_t;
	/**
		Note that penultimate_ is not "end()", but the penultimate node in the container, which allows us to push_back() in constant time.

		\see end()
	*/
	typename node_details_t::base_t::atomic_ptr_t penultimate_;

	/// Advance the cached penultimate_ link after a structural change.
	static void move_penultimate(typename node_details_t::base_t::atomic_ptr_t &ptr) noexcept(true) FORCE_INLINE;
};
608 
609 } } }
610 
611 #include "intrusive_impl.hpp"
612 
613 #endif