libjmmcg  release_579_6_g8cffd
A C++ library containing an eclectic mix of useful, advanced components.
bitfield_map_impl.hpp
/******************************************************************************
** Copyright © 2014 by J.M.McGuiness, coder@hussar.me.uk
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** You should have received a copy of the GNU Lesser General Public
** License along with this library; if not, write to the Free Software
** Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/

namespace jmmcg { namespace LIBJMMCG_VER_NAMESPACE {

namespace private_ {

    using underlying_type=uint64_t;

    template<underlying_type bit_pos, class Iter, class End, bool AtEnd=std::is_same<Iter, End>::value>
    struct index {
        /// Compute the offset in bytes that bit_pos represents, given a bitmask that maps onto the range of types contained in [Iter, End).
        /**
            \param mask The bitmask that maps onto the range of enabled types contained in [Iter, End).
            \return The offset in bytes.
        */
        template<class BMsk>
        static constexpr std::size_t FORCE_INLINE
        result(BMsk mask) noexcept(true) {
            using selected_obj_type=typename boost::mpl::deref<Iter>::type::second;

            if (mask!=BMsk()) {
                const std::size_t current_size=((mask & 0x1u) ? static_cast<std::size_t>(give_void_a_size_of<selected_obj_type>::value) : 0u);
                return current_size+index<bit_pos-1u, typename boost::mpl::next<Iter>::type, End>::result(mask>>1u);
            } else {
                return std::size_t();
            }
        }
    };
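    /*
        A minimal sketch of what index<> computes, under an assumed bit-to-type
        mapping that is not part of the library: bit 0x1 -> char, bit 0x2 -> double,
        bit 0x4 -> int. For a mask with bits 0x1 and 0x4 set (0b101), the offset of
        the object selected by bit 0x4 (bit position 3) accumulates only the sizes
        of the enabled fields at the lower positions:
        \code
            // offset(bit position 3, mask 0b101)
            //     = sizeof(char)   // bit 0x1: set, so its storage precedes ours
            //     + 0              // bit 0x2: clear, so it occupies no storage
            //     = 1
        \endcode
        The selected field itself is never counted, because the recursion stops at
        bit position 1; give_void_a_size_of<> presumably exists so that void-mapped
        fields still contribute a well-defined size to this sum.
    */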
    /**
        Nothing left in the collection of bit-field types, so terminate. In principle we should never get here: the recursion on the bit position should terminate before, or at the same time as, reaching the end of the collection.
    */
    template<underlying_type bit_pos, class Iter, class End>
    struct index<bit_pos, Iter, End, true> {
        template<class BMsk>
        static constexpr std::size_t FORCE_INLINE
        result(BMsk) noexcept(true) {
            return std::size_t();
        }
    };
    /**
        Bit positions are 1-based, but the collection of types is zero-indexed, so ignore the first type entry in the collection when the first bit position is requested.
    */
    template<class Iter, class End, bool AtEnd>
    struct index<1u, Iter, End, AtEnd> {
        template<class BMsk>
        static constexpr std::size_t FORCE_INLINE
        result(BMsk) noexcept(true) {
            return std::size_t();
        }
    };
    /**
        Bit positions are 1-based, but the collection of types is zero-indexed, so ignore the first type entry in the collection when the first bit position is requested.
    */
    template<class Iter, class End>
    struct index<1u, Iter, End, true> {
        template<class BMsk>
        static constexpr std::size_t FORCE_INLINE
        result(BMsk) noexcept(true) {
            return std::size_t();
        }
    };
    /**
        Nothing left of the selected bit positions, so terminate.
    */
    template<class Iter, class End>
    struct index<0u, Iter, End, false> {
        template<class BMsk>
        static constexpr std::size_t FORCE_INLINE
        result(BMsk) noexcept(true) {
            return std::size_t();
        }
    };

    template<class Iter, class End, bool AtEnd=std::is_same<Iter, End>::value>
    struct current_size {
        /// Compute the total number of bytes occupied by the enabled types that the bitmask selects from the range of types contained in [Iter, End).
        /**
            \param mask The bitmask that maps onto the range of types contained in [Iter, End).
            \return The total number of bytes of the enabled types.
        */
        static constexpr std::size_t FORCE_INLINE
        result(underlying_type mask) noexcept(true) {
            using selected_obj_type=typename boost::mpl::deref<Iter>::type::second;

            if (mask!=underlying_type()) {
                const std::size_t curr_size=((mask & 0x1u) ? static_cast<std::size_t>(give_void_a_size_of<selected_obj_type>::value) : 0u);
                return curr_size+current_size<typename boost::mpl::next<Iter>::type, End>::result(mask>>1u);
            } else {
                return std::size_t();
            }
        }
    };
    /**
        Nothing left in the collection of bit-field types, so terminate. In principle we should never get here: the recursion on the mask should terminate before, or at the same time as, reaching the end of the collection.
    */
    template<class Iter, class End>
    struct current_size<Iter, End, true> {
        static constexpr std::size_t FORCE_INLINE
        result(underlying_type) noexcept(true) {
            return std::size_t();
        }
    };
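    /*
        Sketch of current_size<> under the same assumed mapping as above
        (bit 0x1 -> char, bit 0x2 -> double, bit 0x4 -> int): unlike index<>, it
        sums the sizes of all enabled fields, not just those below a position:
        \code
            // current_size(mask 0b101) = sizeof(char) + sizeof(int)
            // current_size(mask 0b010) = sizeof(double)
            // current_size(mask 0b000) = 0
        \endcode
    */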

    template<
        class Cls,
        typename Cls::bitfields_tags_type SelectedField,
        class AsType,
        class Ret
    >
    inline Ret const & FORCE_INLINE
    at(typename Cls::key_type const &bfs, typename Cls::raw_mapped_data_t const &raw_mapped_data) noexcept(false) {
        BOOST_MPL_ASSERT_NOT((std::is_same<typename boost::mpl::at<typename Cls::mapped_types, AsType>::type, boost::mpl::void_>));

        if (bfs & static_cast<typename Cls::key_type>(SelectedField)) {
            using indexer=index<
                mpl::bit_position<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value,
                typename boost::mpl::begin<typename Cls::mapped_types>::type,
                typename boost::mpl::end<typename Cls::mapped_types>::type
            >;
            const std::size_t offset_in_range_types=indexer::result(bfs);
            assert(offset_in_range_types<Cls::range_mapped_types_size);
            const typename Cls::raw_mapped_data_t::value_type &data=raw_mapped_data[offset_in_range_types];
            return *reinterpret_cast<Ret const *>(&data);
        } else {
            std::ostringstream os;
            os<<"Selected field: 0x"<<std::hex<<static_cast<typename Cls::underlying_key_type>(SelectedField)<<", not found in mask=0x"<<std::hex<<bfs;
            throw std::range_error(os.str());
        }
    }
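    /*
        The checked-access behaviour of at<>(), sketched with hypothetical template
        arguments (map_t, the field tags and the mapped types are assumptions, not
        library names):
        \code
            // bfs has the bit for SelectedField set: returns a reference to the
            // stored object, located at the offset computed by index<> above.
            //     int const &i=private_::at<map_t, tags::third, as_third, int>(bfs, raw);

            // bfs does not have the bit set: throws std::range_error, with the
            // selected field and the current mask reported in hex in the message.
            //     double const &d=private_::at<map_t, tags::second, as_second, double>(bfs, raw);
        \endcode
    */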

    template<
        class Cls,
        typename Cls::bitfields_tags_type SelectedField,
        class AsType,
        class Ret
    >
    inline Ret & FORCE_INLINE
    at(typename Cls::key_type const &bfs, typename Cls::raw_mapped_data_t &raw_mapped_data) noexcept(false) {
        BOOST_MPL_ASSERT_NOT((std::is_same<typename boost::mpl::at<typename Cls::mapped_types, AsType>::type, boost::mpl::void_>));

        if (bfs & static_cast<typename Cls::underlying_key_type>(SelectedField)) {
            using indexer=index<
                mpl::bit_position<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value,
                typename boost::mpl::begin<typename Cls::mapped_types>::type,
                typename boost::mpl::end<typename Cls::mapped_types>::type
            >;
            const std::size_t offset_in_range_types=indexer::result(bfs);
            assert(offset_in_range_types<Cls::range_mapped_types_size);
            typename Cls::raw_mapped_data_t::value_type &data=raw_mapped_data[offset_in_range_types];
            return *reinterpret_cast<Ret *>(&data);
        } else {
            std::ostringstream os;
            os<<"Selected field: 0x"<<std::hex<<static_cast<typename Cls::underlying_key_type>(SelectedField)<<", not found in mask=0x"<<std::hex<<bfs;
            throw std::range_error(os.str());
        }
    }

    template<
        class Cls,
        typename Cls::bitfields_tags_type SelectedField,
        class AsType,
        class Ret
    >
    inline constexpr void FORCE_INLINE
    erase(typename Cls::key_type &bfs, typename Cls::raw_mapped_data_t &raw_mapped_data) noexcept(true) {
        BOOST_MPL_ASSERT_NOT((std::is_same<typename boost::mpl::at<typename Cls::mapped_types, AsType>::type, boost::mpl::void_>));
        if (bfs & SelectedField) {
            if (!Cls::all_pod) {
                using indexer=index<
                    mpl::bit_position<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value,
                    typename boost::mpl::begin<typename Cls::mapped_types>::type,
                    typename boost::mpl::end<typename Cls::mapped_types>::type
                >;
                const std::size_t offset_in_range_types=indexer::result(bfs);
                assert(offset_in_range_types<Cls::range_mapped_types_size);
                Ret *data=reinterpret_cast<Ret *>(std::next(raw_mapped_data.begin(), offset_in_range_types));
                data->~Ret();
            }
            bfs&=(~SelectedField);
        }
    }
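    /*
        Note the asymmetry with insert() below: erase() only runs the destructor
        when the map holds non-POD types (Cls::all_pod is false); for an all-POD
        map, clearing the bit in bfs is sufficient. A sketch, under the assumed
        mapping used above:
        \code
            // before: bfs==0b101, the object for bit 0x4 is alive in raw storage
            //         erase<...bit 0x4...>(bfs, raw_mapped_data);
            // after:  bfs==0b001, the destructor has run (non-POD case only)
        \endcode
    */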

    template<
        class Cls,
        typename Cls::bitfields_tags_type SelectedField,
        class AsType,
        class Arg
    >
    inline void FORCE_INLINE
    insert(typename Cls::key_type &bfs, typename Cls::raw_mapped_data_t &raw_mapped_data, Arg const &arg) noexcept(false) {
        BOOST_MPL_ASSERT_NOT((std::is_same<typename boost::mpl::at<typename Cls::mapped_types, AsType>::type, boost::mpl::void_>));
// TODO BOOST_MPL_ASSERT_NOT((static_cast<unsigned long long>(mpl::count_setbits<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value)));

        using indexer=index<
            mpl::bit_position<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value,
            typename boost::mpl::begin<typename Cls::mapped_types>::type,
            typename boost::mpl::end<typename Cls::mapped_types>::type
        >;

        // Verify that no higher bits are set, otherwise inserting a lower key would change the offsets of the already-stored higher keys and leave them indexing uninitialised memory.
        assert(((bfs|mpl::lsb_bitmask<static_cast<unsigned long long>(SelectedField)>::value)^mpl::lsb_bitmask<static_cast<unsigned long long>(SelectedField)>::value)==0u);
        const std::size_t offset_in_range_types=indexer::result(bfs);
        assert(offset_in_range_types<Cls::range_mapped_types_size);
        typename Cls::raw_mapped_data_t::value_type &data=raw_mapped_data[offset_in_range_types];
        if (!(bfs & static_cast<typename Cls::underlying_key_type>(SelectedField))) {
            if (!Cls::all_pod) {
                new(&data) Arg(arg);
            } else {
                *reinterpret_cast<Arg *>(&data)=arg;
            }
            bfs|=static_cast<typename Cls::underlying_key_type>(SelectedField);
        }
        assert(bfs);
    }
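    /*
        The lsb_bitmask assertion above encodes an ordering constraint: fields must
        be inserted in ascending bit order. Inserting a lower field after a higher
        one would shift the byte offsets of the already-constructed higher fields,
        leaving them indexing uninitialised storage. Under the assumed mapping used
        in the earlier sketches:
        \code
            // bfs==0b000: insert bit 0x1, then bit 0x4   -> fine
            // bfs==0b100: insert bit 0x1                 -> assertion fires,
            //             0b100 is not covered by the mask of bits at or below 0x1
        \endcode
        (The exact semantics of mpl::lsb_bitmask<> are assumed here: a mask of all
        bits from the least significant bit up to the selected field.)
    */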

    template<class ObjToDel>
    struct delete_non_void {
        template<class Cls, typename Cls::key_type SelectedField>
        static constexpr void FORCE_INLINE
        result(typename Cls::key_type &mask, typename Cls::raw_mapped_data_t &raw_mapped_data) noexcept(true) {
            mask&=(~SelectedField);
            const std::size_t offset_in_range_types=index<
                mpl::bit_position<static_cast<typename Cls::underlying_key_type>(SelectedField)>::value,
                typename boost::mpl::begin<typename Cls::mapped_types>::type,
                typename boost::mpl::end<typename Cls::mapped_types>::type
            >::result(mask);
            assert(offset_in_range_types<Cls::range_mapped_types_size);
            ObjToDel *data=reinterpret_cast<ObjToDel *>(std::next(raw_mapped_data.begin(), offset_in_range_types));
            data->~ObjToDel();
        }
    };
    template<>
    struct delete_non_void<void> {
        template<class Cls, typename Cls::key_type SelectedField>
        static constexpr void FORCE_INLINE
        result(typename Cls::key_type &, typename Cls::raw_mapped_data_t &) noexcept(true) {
        }
    };

    template<class Cls, unsigned long long SelectedField>
    struct deletor {
        BOOST_MPL_ASSERT_RELATION(sizeof(typename Cls::key_type), <=, sizeof(unsigned long long));

        static constexpr typename Cls::key_type FORCE_INLINE
        result(typename Cls::key_type mask, typename Cls::raw_mapped_data_t &raw_mapped_data) noexcept(true) {
            if (mask & SelectedField) {
                using AsType=typename std::integral_constant<typename Cls::bitfields_tags_type, static_cast<typename Cls::bitfields_tags_type>(SelectedField)>::type;
                using Ret=typename boost::mpl::at<typename Cls::mapped_types, AsType>::type;
                delete_non_void<Ret>::template result<Cls, static_cast<typename Cls::key_type>(SelectedField)>(mask, raw_mapped_data);
            }
            return deletor<Cls, (SelectedField>>1u)>::result(mask, raw_mapped_data);
        }
    };
    template<class Cls>
    struct deletor<Cls, 0u> {
        static constexpr typename Cls::key_type FORCE_INLINE
        result(typename Cls::key_type mask, typename Cls::raw_mapped_data_t const &) noexcept(true) {
            return mask;
        }
    };

    template<class Cls>
    inline constexpr void FORCE_INLINE
    clear(typename Cls::key_type &bfs, typename Cls::raw_mapped_data_t &raw_mapped_data) noexcept(true) {
        enum : typename Cls::key_type {
            msb_set=~(std::numeric_limits<typename Cls::key_type>::max()>>1)
        };
        if (!Cls::all_pod) {
            bfs=deletor<Cls, msb_set>::result(bfs, raw_mapped_data);
        } else {
            bfs=typename Cls::key_type();
        }
    }
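    /*
        clear() walks the mask from the most significant bit of key_type downwards
        via deletor<>, so destructors run in descending bit order, and only for
        fields that are present and mapped to a non-void type. A sketch, assuming
        an 8-bit key_type (not necessarily what a given instantiation uses):
        \code
            // msb_set = ~(0xffu>>1) = 0x80
            // deletor<Cls, 0x80> -> 0x40 -> 0x20 -> ... -> 0x01 -> deletor<Cls, 0u>
            // each step destroys the mapped object if its bit is set, then clears it
        \endcode
        For an all-POD map the walk is skipped entirely and bfs is simply reset.
    */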

}

template<class BFSM, std::size_t BFSz>
inline constexpr
bitfield_map<BFSM, BFSz>::bitfield_map() noexcept(true)
: bfs() {
    BOOST_MPL_ASSERT_RELATION(range_mapped_types_size, <, sizeof(bitfield_map));
    raw_mapped_data.fill(typename raw_mapped_data_t::value_type());
}

template<class BFSM, std::size_t BFSz>
inline constexpr typename bitfield_map<BFSM, BFSz>::key_type
bitfield_map<BFSM, BFSz>::convert_to_biggest_integral_type() const noexcept(true) {
    BOOST_MPL_ASSERT_RELATION(sizeof(key_type), <=, sizeof(private_::underlying_type));
    converter conv{key_type()};
    conv.bfs=bfs;
    return conv.conv_bfs;
}

template<class BFSM, std::size_t BFSz>
inline constexpr void
bitfield_map<BFSM, BFSz>::swap(bitfield_map &bm) noexcept(true) {
    const raw_key_type_t tmp=bfs;
    bfs=bm.bfs;
    bm.bfs=tmp;
    raw_mapped_data.swap(bm.raw_mapped_data);
}

template<class BFSM, std::size_t BFSz>
inline constexpr
bitfield_map<BFSM, BFSz>::bitfield_map(bitfield_map const &bm) noexcept(true)
: bfs(bm.bfs) {
    raw_mapped_data.fill(typename raw_mapped_data_t::value_type());
    memcpy_opt(bm.raw_mapped_data, raw_mapped_data);
}

template<class BFSM, std::size_t BFSz>
inline constexpr
bitfield_map<BFSM, BFSz>::bitfield_map(bitfield_map &&bm) noexcept(true)
: bfs() {
    raw_mapped_data.fill(typename raw_mapped_data_t::value_type());
    swap(bm);
}

template<class BFSM, std::size_t BFSz>
inline constexpr void
bitfield_map<BFSM, BFSz>::clear() noexcept(true) {
    converter conv{key_type()};
    conv.bfs=bfs;
    private_::clear<bitfield_map>(conv.conv_bfs, raw_mapped_data);
    bfs=conv.bfs;
}

template<class BFSM, std::size_t BFSz>
inline
bitfield_map<BFSM, BFSz>::~bitfield_map() noexcept(true) {
    this->clear();
}

template<class BFSM, std::size_t BFSz>
inline constexpr bitfield_map<BFSM, BFSz> &
bitfield_map<BFSM, BFSz>::operator=(bitfield_map &&bm) noexcept(true) {
    swap(bm);
    return *this;
}

template<class BFSM, std::size_t BFSz>
inline constexpr bool
bitfield_map<BFSM, BFSz>::empty() const noexcept(true) {
    return !static_cast<bool>(convert_to_biggest_integral_type());
}

template<class BFSM, std::size_t BFSz>
inline constexpr typename bitfield_map<BFSM, BFSz>::size_type
bitfield_map<BFSM, BFSz>::size() const noexcept(true) {
    const std::size_t sz=private_::current_size<
        typename boost::mpl::begin<mapped_types>::type,
        typename boost::mpl::end<mapped_types>::type
    >::result(convert_to_biggest_integral_type());
    assert(sz<=range_mapped_types_size);
    return sz+bitfields_size;
}
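/*
    size() therefore reports the bit-field header plus the bytes of whatever is
    currently stored, not the capacity. Under the assumed mapping from the sketches
    in private_ (bit 0x1 -> char, bit 0x4 -> int), with both fields present:
    \code
        // size()     == bitfields_size + sizeof(char) + sizeof(int)
        // max_size() == bitfields_size + range_mapped_types_size (see below)
    \endcode
*/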

template<class BFSM, std::size_t BFSz>
inline constexpr typename bitfield_map<BFSM, BFSz>::size_type
bitfield_map<BFSM, BFSz>::max_size() noexcept(true) {
    return static_cast<size_type>(bitfields_size+range_mapped_types_size);
}

template<class BFSM, std::size_t BFSz>
template<typename bitfield_map<BFSM, BFSz>::bitfields_tags_type SelectedField, class AsType, class Ret>
inline constexpr const Ret &
bitfield_map<BFSM, BFSz>::at() const noexcept(false) {
    return private_::at<bitfield_map, SelectedField, AsType, Ret>(convert_to_biggest_integral_type(), raw_mapped_data);
}

template<class BFSM, std::size_t BFSz>
template<typename bitfield_map<BFSM, BFSz>::bitfields_tags_type SelectedField, class AsType, class Ret>
inline constexpr Ret &
bitfield_map<BFSM, BFSz>::at() noexcept(false) {
    return private_::at<bitfield_map, SelectedField, AsType, Ret>(convert_to_biggest_integral_type(), raw_mapped_data);
}

template<class BFSM, std::size_t BFSz>
template<typename bitfield_map<BFSM, BFSz>::bitfields_tags_type SelectedField, class AsType, class Ret>
inline void
bitfield_map<BFSM, BFSz>::erase() noexcept(true) {
    converter conv{key_type()};
    conv.bfs=bfs;
    private_::erase<bitfield_map, SelectedField, AsType, Ret>(conv.conv_bfs, raw_mapped_data);
    bfs=conv.bfs;
}

template<class BFSM, std::size_t BFSz>
template<typename bitfield_map<BFSM, BFSz>::bitfields_tags_type SelectedField>
inline bool
bitfield_map<BFSM, BFSz>::find() const noexcept(true) {
    return convert_to_biggest_integral_type() & static_cast<underlying_key_type>(SelectedField);
}

template<class BFSM, std::size_t BFSz>
template<typename bitfield_map<BFSM, BFSz>::bitfields_tags_type SelectedField, class AsType, class Arg>
inline void
bitfield_map<BFSM, BFSz>::push_back(Arg const &arg) noexcept(false) {
    converter conv{key_type()};
    conv.bfs=bfs;
    private_::insert<bitfield_map, SelectedField, AsType, Arg>(conv.conv_bfs, raw_mapped_data, arg);
    bfs=conv.bfs;
}

} }
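A minimal usage sketch of the container defined above. The bit-field struct type (my_fields), its tag enumeration and the bit-to-type mapping are all hypothetical: the protocol for declaring them lives in the class declaration, which is not part of this implementation file. The calls themselves mirror the member definitions above, with all template arguments spelled out explicitly; tag::first is assumed to map to int.

    using map_t=jmmcg::bitfield_map<my_fields, sizeof(my_fields)>;    // hypothetical instantiation
    using tag=map_t::bitfields_tags_type;
    using as_first=std::integral_constant<tag, tag::first>;           // the AsType form used by deletor<> above

    map_t m;                                     // empty: no bits set, raw storage zero-filled
    m.push_back<tag::first, as_first>(42);       // sets the bit and stores the value at its computed offset
    assert(m.find<tag::first>());                // the bit is now set
    int &v=m.at<tag::first, as_first, int>();    // checked access; throws std::range_error if the field is absent
    m.erase<tag::first, as_first, int>();        // destroys the value (non-POD maps) and clears the bit
    assert(m.empty());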