📄 stl_rope.h
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);
    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);
};

template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
    rope<_CharT,_Alloc>* _M_root_rope;
        // root is treated as a cached version of this,
        // and is used to detect changes to the underlying
        // rope.
        // Root is included in the reference count.
        // This is necessary so that we can detect changes reliably.
        // Unfortunately, it requires careful bookkeeping for the
        // nonGC case.
    _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
      : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
        _M_root_rope(__r)
       { _RopeRep::_S_ref(_M_root); }

    void _M_check();
  public:
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;

  public:
    rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
    _Rope_iterator() {
        _M_root = 0;  // Needed for reference counting.
    };
    _Rope_iterator(const _Rope_iterator& __x)
      : _Rope_iterator_base<_CharT,_Alloc>(__x) {
        _M_root_rope = __x._M_root_rope;
        _RopeRep::_S_ref(_M_root);
    }
    _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
    ~_Rope_iterator() {
        _RopeRep::_S_unref(_M_root);
    }
    _Rope_iterator& operator= (const _Rope_iterator& __x) {
        _RopeRep* __old = _M_root;

        _RopeRep::_S_ref(__x._M_root);
        if (0 != __x._M_buf_ptr) {
            _M_root_rope = __x._M_root_rope;
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_root_rope = __x._M_root_rope;
            _M_buf_ptr = 0;
        }
        _RopeRep::_S_unref(__old);
        return(*this);
    }
    reference operator*() {
        _M_check();
        if (0 == _M_buf_ptr) {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
               _M_root_rope, _M_current_pos);
        } else {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
               _M_root_rope, _M_current_pos, *_M_buf_ptr);
        }
    }
    _Rope_iterator& operator++() {
        _M_incr(1);
        return *this;
    }
    _Rope_iterator& operator+=(difference_type __n) {
        if (__n >= 0) {
            _M_incr(__n);
        } else {
            _M_decr(-__n);
        }
        return *this;
    }
    _Rope_iterator& operator--() {
        _M_decr(1);
        return *this;
    }
    _Rope_iterator& operator-=(difference_type __n) {
        if (__n >= 0) {
            _M_decr(__n);
        } else {
            _M_incr(-__n);
        }
        return *this;
    }
    _Rope_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    _Rope_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    reference operator[](ptrdiff_t __n) {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(
          _M_root_rope, _M_current_pos + __n);
    }
    friend bool operator== __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend bool operator< __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend _Rope_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (ptrdiff_t __n,
         const _Rope_iterator<_CharT,_Alloc>& __x);
};

#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma reset woff 1375
#endif

//  The rope base class encapsulates
//  the differences between SGI-style allocators and standard-conforming
//  allocators.

#ifdef __STL_USE_STD_ALLOCATORS

// Base class for ordinary allocators.
template <class _CharT, class _Allocator, bool _IsStatic>
class _Rope_alloc_base {
public:
  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return _M_data_allocator; }
  _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
        : _M_tree_ptr(__t), _M_data_allocator(__a) {}
  _Rope_alloc_base(const allocator_type& __a)
        : _M_data_allocator(__a) {}

protected:
  // The only data members of a rope:
  allocator_type _M_data_allocator;
  _RopeRep* _M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_T, __name) \
        typedef typename \
          _Alloc_traits<_T,_Allocator>::allocator_type __name##Allocator; \
        _T* __name##_allocate(size_t __n) const \
          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
        void __name##_deallocate(_T *__p, size_t __n) const \
          { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};

// Specialization for allocators that have the property that we don't
// actually have to store an allocator object.
template <class _CharT, class _Allocator>
class _Rope_alloc_base<_CharT,_Allocator,true> {
public:
  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return allocator_type(); }
  _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
        : _M_tree_ptr(__t) {}
  _Rope_alloc_base(const allocator_type&) {}

protected:
  // The only data member of a rope:
  _RopeRep *_M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_T, __name) \
        typedef typename \
          _Alloc_traits<_T,_Allocator>::_Alloc_type __name##Alloc; \
        typedef typename \
          _Alloc_traits<_T,_Allocator>::allocator_type __name##Allocator; \
        static _T* __name##_allocate(size_t __n) \
          { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_T *__p, size_t __n) \
          { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};

template <class _CharT, class _Alloc>
struct _Rope_base
  : public _Rope_alloc_base<_CharT,_Alloc,
                            _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
{
  typedef _Rope_alloc_base<_CharT,_Alloc,
                           _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
          _Base;
  typedef typename _Base::allocator_type allocator_type;
  _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
  _Rope_base(const allocator_type& __a) : _Base(__a) {}
};

#else /* !__STL_USE_STD_ALLOCATORS */

template <class _CharT, class _Alloc>
class _Rope_base {
public:
  typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  typedef _Alloc allocator_type;
  static allocator_type get_allocator() { return allocator_type(); }
  _Rope_base(_RopeRep * __t, const allocator_type&) : _M_tree_ptr(__t) {}
  _Rope_base(const allocator_type&) {}

protected:
  // The only data member of a rope:
  _RopeRep* _M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_T, __name) \
        typedef simple_alloc<_T, _Alloc> __name##Alloc; \
        static _T* __name##_allocate(size_t __n) \
                { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_T *__p, size_t __n) \
                { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Alloc)
# undef __ROPE_DEFINE_ALLOC
};

#endif /* __STL_USE_STD_ALLOCATORS */

template <class _CharT, class _Alloc>
class rope : public _Rope_base<_CharT,_Alloc> {
    public:
        typedef _CharT value_type;
        typedef ptrdiff_t difference_type;
        typedef size_t size_type;
        typedef _CharT const_reference;
        typedef const _CharT* const_pointer;
        typedef _Rope_iterator<_CharT,_Alloc> iterator;
        typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
        typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
        typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;

        friend class _Rope_iterator<_CharT,_Alloc>;
        friend class _Rope_const_iterator<_CharT,_Alloc>;
        friend struct _Rope_RopeRep<_CharT,_Alloc>;
        friend class _Rope_iterator_base<_CharT,_Alloc>;
        friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
        friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
        friend struct _Rope_RopeSubstring<_CharT,_Alloc>;

    protected:
        typedef _Rope_base<_CharT,_Alloc> _Base;
        typedef typename _Base::allocator_type allocator_type;
#       ifdef __STL_USE_NAMESPACES
          using _Base::_M_tree_ptr;
#       endif
        typedef __GC_CONST _CharT* _Cstrptr;
#       ifdef __STL_SGI_THREADS
            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
#               if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
                    return (_Cstrptr) test_and_set((unsigned long*)__p,
                                                   (unsigned long)__q);
#               else
                    return (_Cstrptr) __test_and_set((unsigned long*)__p,
                                                     (unsigned long)__q);
#               endif
            }
#       elif defined(__STL_WIN32THREADS)
            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
                return (_Cstrptr) InterlockedExchange(
                  (LPLONG)__p, (LONG)__q);
            }
#       elif defined(_PTHREADS)
            // This should be portable, but performance is expected
            // to be quite awful.  This really needs platform specific
            // code.
            static pthread_mutex_t _S_swap_lock;
            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
                pthread_mutex_lock(&_S_swap_lock);
                _Cstrptr __result = *__p;
                *__p = __q;
                pthread_mutex_unlock(&_S_swap_lock);
                return __result;
            }
#       else
            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
                _Cstrptr __result = *__p;
                *__p = __q;
                return __result;
            }
#       endif

        static _CharT _S_empty_c_str[1];

        static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
        enum { _S_copy_max = 23 };
                // For strings shorter than _S_copy_max, we copy to
                // concatenate.

        typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
        typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
        typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
        typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
        typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;

        // Retrieve a character at the indicated position.
        static _CharT _S_fetch(_RopeRep* __r, size_type __pos);

#       ifndef __GC
            // Obtain a pointer to the character at the indicated position.
            // The pointer can be used to change the character.
            // If such a pointer cannot be produced, as is frequently the
            // case, 0 is returned instead.
            // (Returns nonzero only if all nodes in the path have a refcount
            // of 1.)
            static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
#       endif

        static bool _S_apply_to_pieces(
                                // should be template parameter
                                _Rope_char_consumer<_CharT>& __c,
                                const _RopeRep* __r,
                                size_t __begin, size_t __end);
                                // begin and end are assumed to be in range.

#       ifndef __GC
          static void _S_unref(_RopeRep* __t)
          {
              _RopeRep::_S_unref(__t);
          }
          static void _S_ref(_RopeRep* __t)
          {