// boost/signals2/detail/auto_buffer.hpp
  1. // Copyright Thorsten Ottosen, 2009.
  2. // Distributed under the Boost Software License, Version 1.0. (See
  3. // accompanying file LICENSE_1_0.txt or copy at
  4. // http://www.boost.org/LICENSE_1_0.txt)
  5. #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  6. #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  7. #include <boost/detail/workaround.hpp>
  8. #if defined(_MSC_VER)
  9. # pragma once
  10. #endif
  11. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  12. #pragma warning(push)
  13. #pragma warning(disable:4996)
  14. #endif
  15. #include <boost/assert.hpp>
  16. #include <boost/config.hpp>
  17. #include <boost/iterator/reverse_iterator.hpp>
  18. #include <boost/iterator/iterator_traits.hpp>
  19. #include <boost/mpl/if.hpp>
  20. #include <boost/signals2/detail/scope_guard.hpp>
  21. #include <boost/swap.hpp>
  22. #include <boost/type_traits/aligned_storage.hpp>
  23. #include <boost/type_traits/alignment_of.hpp>
  24. #include <boost/type_traits/has_nothrow_copy.hpp>
  25. #include <boost/type_traits/has_nothrow_assign.hpp>
  26. #include <boost/type_traits/has_trivial_assign.hpp>
  27. #include <boost/type_traits/has_trivial_constructor.hpp>
  28. #include <boost/type_traits/has_trivial_destructor.hpp>
  29. #include <algorithm>
  30. #include <cstring>
  31. #include <iterator>
  32. #include <memory>
  33. #include <stdexcept>
  34. namespace boost
  35. {
  36. namespace signals2
  37. {
  38. namespace detail
  39. {
  40. //
  41. // Policies for creating the stack buffer.
  42. //
//
// Stack-buffer sizing policy: reserve room for exactly N objects of T.
// (BOOST_STATIC_CONSTANT is kept for legacy-compiler portability.)
//
template< unsigned N >
struct store_n_objects
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
//
// Stack-buffer sizing policy: reserve exactly N bytes; the usable object
// count becomes N / sizeof(T) (see compute_buffer_objects below).
//
template< unsigned N >
struct store_n_bytes
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
namespace auto_buffer_detail
{
    // Size of the stack buffer in bytes for a given policy.
    // Primary template: the policy counts objects, so multiply by sizeof(T).
    template< class Policy, class T >
    struct compute_buffer_size
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
    };

    // store_n_bytes already counts bytes; use the value directly.
    template< unsigned N, class T >
    struct compute_buffer_size< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };

    // Number of T objects the stack buffer can hold for a given policy.
    // Primary template: the policy counts objects directly.
    template< class Policy, class T >
    struct compute_buffer_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
    };

    // store_n_bytes counts bytes; truncating division gives the object count.
    template< unsigned N, class T >
    struct compute_buffer_objects< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
    };
}
  76. struct default_grow_policy
  77. {
  78. template< class SizeType >
  79. static SizeType new_capacity( SizeType capacity )
  80. {
  81. //
  82. // @remark: we grow the capacity quite agressively.
  83. // this is justified since we aim to minimize
  84. // heap-allocations, and because we mostly use
  85. // the buffer locally.
  86. return capacity * 4u;
  87. }
  88. template< class SizeType >
  89. static bool should_shrink( SizeType, SizeType )
  90. {
  91. //
  92. // @remark: when defining a new grow policy, one might
  93. // choose that if the waated space is less
  94. // than a certain percentage, then it is of
  95. // little use to shrink.
  96. //
  97. return true;
  98. }
  99. };
// Forward declaration carrying the default policies: 256 objects of
// stack storage, aggressive 4x growth, std::allocator.
template< class T,
          class StackBufferPolicy = store_n_objects<256>,
          class GrowPolicy        = default_grow_policy,
          class Allocator         = std::allocator<T> >
class auto_buffer;
//
// auto_buffer: a contiguous, vector-like sequence that keeps up to N
// elements (N derived from StackBufferPolicy) in an internal stack
// buffer and only uses Allocator for larger capacities.  Private
// derivation from Allocator enables the empty-base optimization for
// stateless allocators.
//
// Exception-safety guarantees are noted per member ("strong", "basic",
// "nothrow"); several operations deliberately give only the basic
// guarantee in exchange for fewer allocations.
//
template
<
    class T,
    class StackBufferPolicy,
    class GrowPolicy,
    class Allocator
>
class auto_buffer : Allocator
{
private:
    // Number of T objects the inline (stack) storage can hold.
    enum { N = auto_buffer_detail::
               compute_buffer_objects<StackBufferPolicy,T>::value };

    BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );

    // A heap-only variant of this buffer; used conceptually for the
    // temporary in insert()/one_sided_swap().
    typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
                                                          local_buffer;

public:
    typedef Allocator                                     allocator_type;
    typedef T                                             value_type;
    typedef typename Allocator::size_type                 size_type;
    typedef typename Allocator::difference_type           difference_type;
    typedef T*                                            pointer;
#ifdef BOOST_NO_CXX11_ALLOCATOR
    typedef typename Allocator::pointer                   allocator_pointer;
#else
    typedef typename std::allocator_traits<Allocator>::pointer allocator_pointer;
#endif
    typedef const T*                                      const_pointer;
    typedef T&                                            reference;
    typedef const T&                                      const_reference;
    typedef pointer                                       iterator;
    typedef const_pointer                                 const_iterator;
    typedef boost::reverse_iterator<iterator>             reverse_iterator;
    typedef boost::reverse_iterator<const_iterator>       const_reverse_iterator;
    // Pass small trivially-assignable Ts by value, everything else by
    // const reference.
    typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
                                       && sizeof(T) <= sizeof(long double),
                                       const value_type,
                                       const_reference >::type
                                                          optimized_const_reference;
private:
    // Heap storage for capacity_arg > N; otherwise the internal stack
    // buffer (no allocation, cannot throw).
    pointer allocate( size_type capacity_arg )
    {
        if( capacity_arg > N )
            return &*get_allocator().allocate( capacity_arg );
        else
            return static_cast<T*>( members_.address() );
    }

    // Counterpart of allocate(): a no-op when 'where' is the stack buffer.
    void deallocate( pointer where, size_type capacity_arg )
    {
        if( capacity_arg <= N )
            return;
        get_allocator().deallocate( allocator_pointer(where), capacity_arg );
    }

    // Copy-construct [begin,end) into raw storage at 'where'.  For
    // random-access iterators, dispatch further on trivial assignability
    // so contiguous trivial ranges become a memcpy.
    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
    {
        copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
    }

    // Trivially-assignable contiguous range: bitwise copy.
    static void copy_rai( const T* begin, const T* end,
                          pointer where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    // Fallback: element-wise copy-construction into raw storage.
    template< class I, bool b >
    static void copy_rai( I begin, I end,
                          pointer where, const boost::integral_constant<bool, b>& )
    {
        std::uninitialized_copy( begin, end, where );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
    {
        std::uninitialized_copy( begin, end, where );
    }

    template< class I >
    static void copy_impl( I begin, I end, pointer where )
    {
        copy_impl( begin, end, where,
                   typename std::iterator_traits<I>::iterator_category() );
    }

    // Assign (not construct) [begin,end) over already-constructed
    // elements at 'where'; memcpy when assignment is trivial.
    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where )
    {
        assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
    {
        std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
    }

    template< class I, class I2 >
    static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
    {
        for( ; begin != end; ++begin, ++where )
            *where = *begin;
    }

    // Append n value-initialized elements without capacity checks.
    // NOTE(review): callers dispatch this on has_trivial_assign<T> (see
    // unchecked_push_back_n(size_type)); a constructibility trait would
    // seem the more natural discriminator — confirm intent.
    void unchecked_push_back_n( size_type n, const boost::true_type& )
    {
        std::uninitialized_fill( end(), end() + n, T() );
        size_ += n;
    }

    void unchecked_push_back_n( size_type n, const boost::false_type& )
    {
        for( size_type i = 0u; i < n; ++i )
            unchecked_push_back();
    }

    // Destroy a single element (no-op for trivially destructible T).
    void auto_buffer_destroy( pointer where, const boost::false_type& )
    {
        (*where).~T();
    }

    void auto_buffer_destroy( pointer, const boost::true_type& )
    { }

    void auto_buffer_destroy( pointer where )
    {
        auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
    }

    // Destroy all elements and release the heap buffer (if any).
    void auto_buffer_destroy()
    {
        BOOST_ASSERT( is_valid() );
        if( buffer_ ) // do we need this check? Yes, but only
                      // for N = 0u + local instances in one_sided_swap()
            auto_buffer_destroy( boost::has_trivial_destructor<T>() );
    }

    // Destroy the last n elements, back to front.
    void destroy_back_n( size_type n, const boost::false_type& )
    {
        BOOST_ASSERT( n > 0 );
        pointer buffer  = buffer_ + size_ - 1u;
        pointer new_end = buffer - n;
        for( ; buffer > new_end; --buffer )
            auto_buffer_destroy( buffer );
    }

    void destroy_back_n( size_type, const boost::true_type& )
    { }

    void destroy_back_n( size_type n )
    {
        destroy_back_n( n, boost::has_trivial_destructor<T>() );
    }

    // Full teardown: destroy elements (non-trivial case), then free storage.
    void auto_buffer_destroy( const boost::false_type& x )
    {
        if( size_ )
            destroy_back_n( size_, x );
        deallocate( buffer_, members_.capacity_ );
    }

    void auto_buffer_destroy( const boost::true_type& )
    {
        deallocate( buffer_, members_.capacity_ );
    }

    // Copy all elements into a freshly allocated buffer of new_capacity.
    // The false_type overload guards the new buffer so a throwing copy
    // constructor does not leak it (strong guarantee).
    pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        scope_guard guard = make_obj_guard( *this,
                                            &auto_buffer::deallocate,
                                            new_buffer,
                                            new_capacity );
        copy_impl( begin(), end(), new_buffer ); // strong
        guard.dismiss();                         // nothrow
        return new_buffer;
    }

    pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
    {
        pointer new_buffer = allocate( new_capacity ); // strong
        copy_impl( begin(), end(), new_buffer );       // nothrow
        return new_buffer;
    }

    // Reallocate to exactly new_capacity, destroying the old buffer only
    // after the copy has succeeded.
    void reserve_impl( size_type new_capacity )
    {
        pointer new_buffer = move_to_new_buffer( new_capacity,
                                                 boost::has_nothrow_copy<T>() );
        auto_buffer_destroy();
        buffer_ = new_buffer;
        members_.capacity_ = new_capacity;
        BOOST_ASSERT( size_ <= members_.capacity_ );
    }

    // Grow per policy, but never below the requested n.
    size_type new_capacity_impl( size_type n )
    {
        BOOST_ASSERT( n > members_.capacity_ );
        size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
        // @todo: consider to check for allocator.max_size()
        return (std::max)(new_capacity,n);
    }

    // Swap two stack-resident buffers of trivially-assignable T via a
    // temporary copy.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::true_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );

        auto_buffer temp( l.begin(), l.end() );
        assign_impl( r.begin(), r.end(), l.begin() );
        assign_impl( temp.begin(), temp.end(), r.begin() );
        boost::swap( l.size_, r.size_ );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // General stack/stack swap: element-wise swap of the common prefix,
    // then move the surplus from the larger into the smaller.
    static void swap_helper( auto_buffer& l, auto_buffer& r,
                             const boost::false_type& )
    {
        BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
        size_type min_size = (std::min)(l.size_,r.size_);
        size_type max_size = (std::max)(l.size_,r.size_);
        size_type diff     = max_size - min_size;
        auto_buffer* smallest = l.size_ == min_size ? &l : &r;
        auto_buffer* largest  = smallest == &l ? &r : &l;

        // @remark: the implementation below is not as fast
        //          as it could be if we assumed T had a default
        //          constructor.

        size_type i = 0u;
        for(  ; i < min_size; ++i )
            boost::swap( (*smallest)[i], (*largest)[i] );

        for( ; i < max_size; ++i )
            smallest->unchecked_push_back( (*largest)[i] );

        largest->pop_back_n( diff );
        boost::swap( l.members_.capacity_, r.members_.capacity_ );
    }

    // Steal temp's heap buffer; temp is left empty (buffer_ == 0) so its
    // destructor is a no-op.  Must not throw.
    void one_sided_swap( auto_buffer& temp ) // nothrow
    {
        BOOST_ASSERT( !temp.is_on_stack() );
        auto_buffer_destroy();
        // @remark: must be nothrow
        get_allocator()    = temp.get_allocator();
        members_.capacity_ = temp.members_.capacity_;
        buffer_            = temp.buffer_;
        BOOST_ASSERT( temp.size_ >= size_ + 1u );
        size_              = temp.size_;
        temp.buffer_       = 0;
        BOOST_ASSERT( temp.is_valid() );
    }

    // Input iterators cannot be measured up front: insert one at a time.
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::input_iterator_tag )
    {
        for( ; begin_arg != end_arg; ++begin_arg )
        {
            before = insert( before, *begin_arg );
            ++before;
        }
    }

    // Extend size by n; for trivially-constructible T no construction is
    // needed, just bump size_.
    void grow_back( size_type n, const boost::true_type& )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        size_ += n;
    }

    void grow_back( size_type n, const boost::false_type& )
    {
        unchecked_push_back_n(n);
    }

    void grow_back( size_type n )
    {
        grow_back( n, boost::has_trivial_constructor<T>() );
    }

    void grow_back_one( const boost::true_type& )
    {
        BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
        size_ += 1;
    }

    void grow_back_one( const boost::false_type& )
    {
        unchecked_push_back();
    }

    void grow_back_one()
    {
        grow_back_one( boost::has_trivial_constructor<T>() );
    }

    // Forward-iterator insert: shift the tail and assign in place when
    // capacity suffices, otherwise rebuild into a temporary and swap.
    template< class I >
    void insert_impl( const_iterator before, I begin_arg, I end_arg,
                      std::forward_iterator_tag )
    {
        difference_type n = std::distance(begin_arg, end_arg);

        if( size_ + n <= members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            if( !is_back_insertion )
            {
                grow_back( n );
                iterator where = const_cast<T*>(before);
                // NOTE(review): source [before, cend()-n) and destination
                // where+n overlap with dest > src; std::copy requires the
                // destination to lie outside the source range — this looks
                // like it should be std::copy_backward.  Confirm against
                // upstream history before changing.
                std::copy( before, cend() - n, where + n );
                assign_impl( begin_arg, end_arg, where );
            }
            else
            {
                unchecked_push_back( begin_arg, end_arg );
            }
            BOOST_ASSERT( is_valid() );
            return;
        }

        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        temp.unchecked_push_back( begin_arg, end_arg );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

public:
    // Class invariant; cheap enough to assert after every mutation.
    bool is_valid() const // invariant
    {
        // @remark: allowed for N==0 and when
        //          using a local instance
        //          in insert()/one_sided_swap()
        if( buffer_ == 0 )
            return true;

        if( members_.capacity_ < N )
            return false;

        if( !is_on_stack() && members_.capacity_ <= N )
            return false;

        if( buffer_ == members_.address() )
            if( members_.capacity_ > N )
                return false;

        if( size_ > members_.capacity_ )
            return false;

        return true;
    }

    // Empty buffer using the stack storage; never allocates.
    auto_buffer()
        : members_( N ),
          buffer_( static_cast<T*>(members_.address()) ),
          size_( 0u )
    {
        BOOST_ASSERT( is_valid() );
    }

    // Copy constructor.
    // NOTE(review): the allocator is default-constructed here rather than
    // copied from r — for stateful allocators that differs from standard
    // container semantics; confirm this is intentional.
    auto_buffer( const auto_buffer& r )
        : members_( (std::max)(r.size_,size_type(N)) ),
          buffer_( allocate( members_.capacity_ ) ),
          size_( 0 )
    {
        copy_impl( r.begin(), r.end(), buffer_ );
        size_ = r.size_;
        BOOST_ASSERT( is_valid() );
    }

    // Copy assignment; basic guarantee only (memory is released before
    // the new copy is made, see remark below).
    auto_buffer& operator=( const auto_buffer& r ) // basic
    {
        if( this == &r )
            return *this;

        difference_type diff = size_ - r.size_;
        if( diff >= 0 )
        {
            pop_back_n( static_cast<size_type>(diff) );
            assign_impl( r.begin(), r.end(), begin() );
        }
        else
        {
            if( members_.capacity_ >= r.size() )
            {
                unchecked_push_back_n( static_cast<size_type>(-diff) );
                assign_impl( r.begin(), r.end(), begin() );
            }
            else
            {
                // @remark: we release memory as early as possible
                //          since we only give the basic guarantee
                auto_buffer_destroy();
                buffer_ = 0;
                pointer new_buffer = allocate( r.size() );
                scope_guard guard = make_obj_guard( *this,
                                                    &auto_buffer::deallocate,
                                                    new_buffer,
                                                    r.size() );
                copy_impl( r.begin(), r.end(), new_buffer );
                guard.dismiss();
                buffer_            = new_buffer;
                members_.capacity_ = r.size();
                size_              = members_.capacity_;
            }
        }

        BOOST_ASSERT( size() == r.size() );
        BOOST_ASSERT( is_valid() );
        return *this;
    }

    // Empty buffer with at least capacity_arg capacity (never less than N).
    explicit auto_buffer( size_type capacity_arg )
        : members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    // size_arg copies of init_value.
    auto_buffer( size_type size_arg, optimized_const_reference init_value )
        : members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    // As above, with an explicit allocator.
    auto_buffer( size_type capacity_arg, const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(capacity_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        BOOST_ASSERT( is_valid() );
    }

    auto_buffer( size_type size_arg, optimized_const_reference init_value,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( (std::max)(size_arg, size_type(N)) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
        size_ = size_arg;
        BOOST_ASSERT( is_valid() );
    }

    // Range constructor; capacity is the range length, bumped to N
    // afterwards so the stack-buffer invariant holds.
    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
        :
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    template< class ForwardIterator >
    auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
                 const allocator_type& a )
        : allocator_type( a ),
          members_( std::distance(begin_arg, end_arg) ),
          buffer_( allocate(members_.capacity_) ),
          size_( 0 )
    {
        copy_impl( begin_arg, end_arg, buffer_ );
        size_ = members_.capacity_;
        if( members_.capacity_ < N )
            members_.capacity_ = N;
        BOOST_ASSERT( is_valid() );
    }

    ~auto_buffer()
    {
        auto_buffer_destroy();
    }

public:
    bool empty() const
    {
        return size_ == 0;
    }

    bool full() const
    {
        return size_ == members_.capacity_;
    }

    // True while the elements live in the inline stack storage.
    bool is_on_stack() const
    {
        return members_.capacity_ <= N;
    }

    size_type size() const
    {
        return size_;
    }

    size_type capacity() const
    {
        return members_.capacity_;
    }

public:
    pointer data()
    {
        return buffer_;
    }

    const_pointer data() const
    {
        return buffer_;
    }

    allocator_type& get_allocator()
    {
        return static_cast<allocator_type&>(*this);
    }

    const allocator_type& get_allocator() const
    {
        return static_cast<const allocator_type&>(*this);
    }

public:
    iterator begin()
    {
        return buffer_;
    }

    const_iterator begin() const
    {
        return buffer_;
    }

    iterator end()
    {
        return buffer_ + size_;
    }

    const_iterator end() const
    {
        return buffer_ + size_;
    }

    reverse_iterator rbegin()
    {
        return reverse_iterator(end());
    }

    const_reverse_iterator rbegin() const
    {
        return const_reverse_iterator(end());
    }

    reverse_iterator rend()
    {
        return reverse_iterator(begin());
    }

    const_reverse_iterator rend() const
    {
        return const_reverse_iterator(begin());
    }

    const_iterator cbegin() const
    {
        return const_cast<const auto_buffer*>(this)->begin();
    }

    const_iterator cend() const
    {
        return const_cast<const auto_buffer*>(this)->end();
    }

    const_reverse_iterator crbegin() const
    {
        return const_cast<const auto_buffer*>(this)->rbegin();
    }

    const_reverse_iterator crend() const
    {
        return const_cast<const auto_buffer*>(this)->rend();
    }

public:
    reference front()
    {
        return buffer_[0];
    }

    optimized_const_reference front() const
    {
        return buffer_[0];
    }

    reference back()
    {
        return buffer_[size_-1];
    }

    optimized_const_reference back() const
    {
        return buffer_[size_-1];
    }

    reference operator[]( size_type n )
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    optimized_const_reference operator[]( size_type n ) const
    {
        BOOST_ASSERT( n < size_ );
        return buffer_[n];
    }

    // Append a default-constructed element; caller guarantees capacity.
    void unchecked_push_back()
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T;
        ++size_;
    }

    void unchecked_push_back_n( size_type n )
    {
        BOOST_ASSERT( size_ + n <= members_.capacity_ );
        unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
    }

    void unchecked_push_back( optimized_const_reference x ) // non-growing
    {
        BOOST_ASSERT( !full() );
        new (buffer_ + size_) T( x );
        ++size_;
    }

    template< class ForwardIterator >
    void unchecked_push_back( ForwardIterator begin_arg,
                              ForwardIterator end_arg ) // non-growing
    {
        BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
        copy_impl( begin_arg, end_arg, buffer_ + size_ );
        size_ += std::distance(begin_arg, end_arg);
    }

    // Grow capacity to exactly n (no growth-policy rounding).
    void reserve_precisely( size_type n )
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;
        reserve_impl( n );
        BOOST_ASSERT( members_.capacity_ == n );
    }

    // Grow capacity to at least n, rounded up by GrowPolicy.
    void reserve( size_type n ) // strong
    {
        BOOST_ASSERT( members_.capacity_ >= N );

        if( n <= members_.capacity_ )
            return;

        reserve_impl( new_capacity_impl( n ) );
        BOOST_ASSERT( members_.capacity_ >= n );
    }

    void push_back()
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back();
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back();
        }
    }

    void push_back( optimized_const_reference x )
    {
        if( size_ != members_.capacity_ )
        {
            unchecked_push_back( x );
        }
        else
        {
            reserve( size_ + 1u );
            unchecked_push_back( x );
        }
    }

    template< class ForwardIterator >
    void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
    {
        difference_type diff = std::distance(begin_arg, end_arg);
        if( size_ + diff > members_.capacity_ )
            reserve( size_ + diff );
        unchecked_push_back( begin_arg, end_arg );
    }

    // Insert a single element; basic guarantee.
    iterator insert( const_iterator before, optimized_const_reference x ) // basic
    {
        // @todo: consider if we want to support x in 'this'
        if( size_ < members_.capacity_ )
        {
            bool is_back_insertion = before == cend();
            iterator where = const_cast<T*>(before);

            if( !is_back_insertion )
            {
                grow_back_one();
                // NOTE(review): overlapping forward copy with dest > src
                // (see insert_impl) — looks like std::copy_backward
                // territory; confirm before changing.
                std::copy( before, cend() - 1u, where + 1u );
                *where = x;
                BOOST_ASSERT( is_valid() );
            }
            else
            {
                unchecked_push_back( x );
            }
            return where;
        }

        auto_buffer temp( new_capacity_impl( size_ + 1u ) );
        temp.unchecked_push_back( cbegin(), before );
        iterator result = temp.end();
        temp.unchecked_push_back( x );
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
        return result;
    }

    // Insert n copies of x.
    void insert( const_iterator before, size_type n,
                 optimized_const_reference x )
    {
        // @todo: see problems above
        if( size_ + n <= members_.capacity_ )
        {
            grow_back( n );
            iterator where = const_cast<T*>(before);
            // NOTE(review): same overlapping std::copy pattern as in
            // insert_impl — confirm.
            std::copy( before, cend() - n, where + n );
            std::fill( where, where + n, x );
            BOOST_ASSERT( is_valid() );
            return;
        }

        auto_buffer temp( new_capacity_impl( size_ + n ) );
        temp.unchecked_push_back( cbegin(), before );
        std::uninitialized_fill_n( temp.end(), n, x );
        temp.size_ += n;
        temp.unchecked_push_back( before, cend() );
        one_sided_swap( temp );
        BOOST_ASSERT( is_valid() );
    }

    // Insert a range; dispatches on iterator category.
    template< class ForwardIterator >
    void insert( const_iterator before,
                 ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
    {
        typedef typename std::iterator_traits<ForwardIterator>
            ::iterator_category category;
        insert_impl( before, begin_arg, end_arg, category() );
    }

    void pop_back()
    {
        BOOST_ASSERT( !empty() );
        auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
        --size_;
    }

    void pop_back_n( size_type n )
    {
        BOOST_ASSERT( n <= size_ );
        if( n )
        {
            destroy_back_n( n );
            size_ -= n;
        }
    }

    void clear()
    {
        pop_back_n( size_ );
    }

    // Erase one element; shifts the tail left, returns iterator to the
    // element after the erased one.
    iterator erase( const_iterator where )
    {
        BOOST_ASSERT( !empty() );
        BOOST_ASSERT( cbegin() <= where );
        BOOST_ASSERT( cend() > where );

        unsigned elements = cend() - where - 1u;

        if( elements > 0u )
        {
            const_iterator start = where + 1u;
            std::copy( start, start + elements,
                       const_cast<T*>(where) );
        }
        pop_back();
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( where );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    // Erase the range [from,to).
    iterator erase( const_iterator from, const_iterator to )
    {
        BOOST_ASSERT( !(std::distance(from,to)>0) ||
                      !empty() );
        BOOST_ASSERT( cbegin() <= from );
        BOOST_ASSERT( cend() >= to );

        unsigned elements = std::distance(to,cend());

        if( elements > 0u )
        {
            BOOST_ASSERT( elements > 0u );
            std::copy( to, to + elements,
                       const_cast<T*>(from) );
        }
        pop_back_n( std::distance(from,to) );
        BOOST_ASSERT( !full() );
        iterator result = const_cast<T*>( from );
        BOOST_ASSERT( result <= end() );
        return result;
    }

    // Release surplus heap capacity if the policy allows it.
    void shrink_to_fit()
    {
        if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
            return;

        reserve_impl( size_ );
        members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
        BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
        BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
    }

    // Extend size by n without constructing; returns the old end so the
    // caller can construct the elements in place.
    pointer uninitialized_grow( size_type n ) // strong
    {
        if( size_ + n > members_.capacity_ )
            reserve( size_ + n );

        pointer res = end();
        size_ += n;
        return res;
    }

    // Shrink size by n without destroying elements.
    void uninitialized_shrink( size_type n ) // nothrow
    {
        // @remark: test for wrap-around
        BOOST_ASSERT( size_ - n <= members_.capacity_ );
        size_ -= n;
    }

    void uninitialized_resize( size_type n )
    {
        if( n > size() )
            uninitialized_grow( n - size() );
        else if( n < size() )
            uninitialized_shrink( size() - n );

       BOOST_ASSERT( size() == n );
    }

    // nothrow   - if both buffer are on the heap, or
    //           - if one buffer is on the heap and one has
    //             'has_allocated_buffer() == false', or
    //           - if copy-construction cannot throw
    // basic     - otherwise (better guarantee impossible)
    // requirement: the allocator must be no-throw-swappable
    void swap( auto_buffer& r )
    {
        bool on_stack      = is_on_stack();
        bool r_on_stack    = r.is_on_stack();
        bool both_on_heap  = !on_stack && !r_on_stack;
        if( both_on_heap )
        {
            boost::swap( get_allocator(), r.get_allocator() );
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( buffer_, r.buffer_ );
            boost::swap( size_, r.size_ );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack || r_on_stack );
        bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
                                    (!on_stack && r_on_stack);

        //
        // Remark: we now know that we can copy into
        //         the unused stack buffer.
        //
        if( exactly_one_on_stack )
        {
            auto_buffer* one_on_stack = on_stack ? this : &r;
            auto_buffer* other        = on_stack ? &r : this;
            pointer new_buffer = static_cast<T*>(other->members_.address());
            copy_impl( one_on_stack->begin(), one_on_stack->end(),
                       new_buffer );                            // strong
            one_on_stack->auto_buffer_destroy();                // nothrow
            boost::swap( get_allocator(), r.get_allocator() );  // assume nothrow
            boost::swap( members_.capacity_, r.members_.capacity_ );
            boost::swap( size_, r.size_ );
            one_on_stack->buffer_ = other->buffer_;
            other->buffer_        = new_buffer;
            BOOST_ASSERT( other->is_on_stack() );
            BOOST_ASSERT( !one_on_stack->is_on_stack() );
            BOOST_ASSERT( is_valid() );
            BOOST_ASSERT( r.is_valid() );
            return;
        }

        BOOST_ASSERT( on_stack && r_on_stack );
        swap_helper( *this, r, boost::has_trivial_assign<T>() );
        BOOST_ASSERT( is_valid() );
        BOOST_ASSERT( r.is_valid() );
    }

private:
    // Raw, suitably aligned storage for N objects of T.
    typedef boost::aligned_storage< N * sizeof(T),
                                    boost::alignment_of<T>::value >
                                                storage;

    // Bundles the stack storage with the capacity; inherits from storage
    // so an empty (N==0) storage costs no space.
    struct members_type : storage /* to enable EBO */
    {
        size_type capacity_;

        members_type( size_type capacity )
            : capacity_(capacity)
        { }

        void* address() const
        { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
    };

    members_type members_;   // stack buffer + current capacity
    pointer      buffer_;    // points at stack or heap storage; 0 only for
                             // moved-from temporaries (one_sided_swap)
    size_type    size_;      // number of constructed elements
};
//
// Free-function swap so boost::swap / ADL find it; forwards to the
// member swap, which handles all stack/heap buffer combinations.
//
template< class T, class SBP, class GP, class A >
inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
{
    l.swap( r );
}
  938. template< class T, class SBP, class GP, class A >
  939. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  940. const auto_buffer<T,SBP,GP,A>& r )
  941. {
  942. if( l.size() != r.size() )
  943. return false;
  944. return std::equal( l.begin(), l.end(), r.begin() );
  945. }
  946. template< class T, class SBP, class GP, class A >
  947. inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
  948. const auto_buffer<T,SBP,GP,A>& r )
  949. {
  950. return !(l == r);
  951. }
  952. template< class T, class SBP, class GP, class A >
  953. inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
  954. const auto_buffer<T,SBP,GP,A>& r )
  955. {
  956. return std::lexicographical_compare( l.begin(), l.end(),
  957. r.begin(), r.end() );
  958. }
  959. template< class T, class SBP, class GP, class A >
  960. inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
  961. const auto_buffer<T,SBP,GP,A>& r )
  962. {
  963. return (r < l);
  964. }
  965. template< class T, class SBP, class GP, class A >
  966. inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
  967. const auto_buffer<T,SBP,GP,A>& r )
  968. {
  969. return !(l > r);
  970. }
  971. template< class T, class SBP, class GP, class A >
  972. inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
  973. const auto_buffer<T,SBP,GP,A>& r )
  974. {
  975. return !(l < r);
  976. }
  977. } // namespace detail
  978. } // namespace signals2
  979. }
  980. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  981. #pragma warning(pop)
  982. #endif
  983. #endif