⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 stl_rope.h

📁 俄罗斯高人Mamaich的Pocket gcc编译器(运行在PocketPC上)的全部源代码。
💻 H
📖 第 1 页 / 共 5 页
字号:
              }
              break;
            case _S_leaf:
              {
                // Leaf node: characters are stored inline in _M_data, so
                // the requested range is copied straight out of the flat
                // buffer, offset by both the caller's start and our own
                // substring origin (_M_start).
                __GC_CONST _CharT* __s =
                        ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
                uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
                                     __buffer);
              }
              break;
            default:
	      break;
        }
    }
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
        allocator_type;
    // Build a substring node over base rope __b, starting at offset __s
    // with length __l.  It is represented as a "function" node (tag is
    // overwritten to _S_substringfn below) whose char_producer is *this.
    _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                          size_t __l, allocator_type __a)
      : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
        char_producer<_CharT>(),
        _M_base(__b),
        _M_start(__s)
    {
#       ifndef __GC
            // Without a garbage collector we hold an explicit reference
            // to the base tree for the lifetime of this node.
            _M_base->_M_ref_nonnil();
#       endif
        _M_tag = _S_substringfn;
    }
    virtual ~_Rope_RopeSubstring()
      { 
#       ifndef __GC
          // Release the reference acquired in the constructor.
          _M_base->_M_unref_nonnil();
          // _M_free_c_string();  -- done by parent class
#       endif
      }
};

// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers.  Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
// is raised.  It is the caller's responsibility to
// adjust reference counts when these pointers are initialized
// or assigned to.
// (This convention significantly reduces
// the number of potentially expensive reference count
// updates.)
#ifndef __GC
  // Exception-safety guard: the destructor unconditionally unrefs the
  // held _Rope_RopeRep*, so the reference is released whether the scope
  // exits normally or via a throw.  Construction and assignment do NOT
  // bump the count -- the caller manages it (see comment above).
  template<class _CharT, class _Alloc>
  struct _Rope_self_destruct_ptr {
    _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
    ~_Rope_self_destruct_ptr() 
      { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#ifdef __EXCEPTIONS
        // With exceptions enabled the pointer must start out null, so
        // that an early throw does not unref an indeterminate pointer.
        _Rope_self_destruct_ptr() : _M_ptr(0) {};
#else
        _Rope_self_destruct_ptr() {};
#endif
    _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
    _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
    _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
    operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
    _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
        { _M_ptr = __x; return *this; }
  };
#endif

// Dereferencing a nonconst iterator has to return something
// that behaves almost like a reference.  It's not possible to
// return an actual reference since assignment requires extra
// work.  And we would get into the same problems as with the
// CD2 version of basic_string.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
    friend class rope<_CharT,_Alloc>;
    friend class _Rope_iterator<_CharT,_Alloc>;
    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
#   ifdef __GC
        typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
#   else
        typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
#   endif
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    typedef rope<_CharT,_Alloc> _My_rope;
    size_t _M_pos;              // Index of the referenced char in *_M_root.
    _CharT _M_current;          // Cached char value; see _M_current_valid.
    bool _M_current_valid;      // True iff _M_current holds the value.
    _My_rope* _M_root;     // The whole rope.
  public:
    // Proxy for rope character at position __p; the value itself is
    // fetched lazily (cache starts invalid).
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
      :  _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
    _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
      : _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
        // Don't preserve cache if the reference can outlive the
        // expression.  We claim that's not possible without calling
        // a copy constructor or generating reference to a proxy
        // reference.  We declare the latter to have undefined semantics.
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
      : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
    inline operator _CharT () const;
    _Rope_char_ref_proxy& operator= (_CharT __c);
    _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
    // Proxy-to-proxy assignment forwards through the value conversion,
    // i.e. it copies the referenced character, not the proxy state.
    _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
        return operator=((_CharT)__c); 
    }
};

// swap for character proxies: exchanges the referenced characters in the
// underlying ropes.  Proxies are taken by value on purpose -- they are
// views, not owners.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
    _CharT __tmp = __a;
    __a = __b;
    __b = __tmp;
}

// Pointer-like companion to _Rope_char_ref_proxy (result of operator&).
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
    // XXX this class should be rewritten.
    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
    size_t _M_pos;
    rope<_CharT,_Alloc>* _M_root;     // The whole rope.
  public:
    _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x) 
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    // NOTE(review): default ctor leaves _M_pos/_M_root uninitialized;
    // callers are expected to assign before use.
    _Rope_char_ptr_proxy() {}
    // Only a null _CharT* makes sense here; __x is otherwise ignored.
    _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
    }
    _Rope_char_ptr_proxy& 
    operator= (const _Rope_char_ptr_proxy& __x) {
        _M_pos = __x._M_pos;
        _M_root = __x._M_root;
        return *this;
    }
    template<class _CharT2, class _Alloc2>
    friend bool operator== (const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __x,
                            const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __y);
    // Dereference yields a fresh character proxy at the stored position.
    _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
    }
};

// Rope iterators:
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
// When we run out of cache, we have to reconstruct the iterator
// value.
// Pointers from iterators are not included in reference counts.
// Iterators are assumed to be thread private.  Ropes can
// be shared.

template<class _CharT, class _Alloc>
class _Rope_iterator_base
  : public iterator<std::random_access_iterator_tag, _CharT>
{
    friend class rope<_CharT,_Alloc>;
  public:
    typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
        // Borland doesn't want this to be protected.
  protected:
    enum { _S_path_cache_len = 4 }; // Must be <= 9.
    enum { _S_iterator_buf_len = 15 };
    size_t _M_current_pos;   // Absolute character index within the rope.
    _RopeRep* _M_root;     // The whole rope.
    size_t _M_leaf_pos;    // Starting position for current leaf
    __GC_CONST _CharT* _M_buf_start;
                        // Buffer possibly
                        // containing current char.
    __GC_CONST _CharT* _M_buf_ptr;
                        // Pointer to current char in buffer.
     // != 0 ==> buffer valid.    __GC_CONST _CharT* _M_buf_end;                        // One past __last valid char in buffer.    // What follows is the path cache.  We go out of our    // way to make this compact.    // Path_end contains the bottom section of the path from    // the root to the current leaf.    const _RopeRep* _M_path_end[_S_path_cache_len];    int _M_leaf_index;     // Last valid __pos in path_end;                        // _M_path_end[0] ... _M_path_end[leaf_index-1]                        // point to concatenation nodes.    unsigned char _M_path_directions;                          // (path_directions >> __i) & 1 is 1                          // iff we got from _M_path_end[leaf_index - __i - 1]                          // to _M_path_end[leaf_index - __i] by going to the                          // __right. Assumes path_cache_len <= 9.    _CharT _M_tmp_buf[_S_iterator_buf_len];                        // Short buffer for surrounding chars.                        // This is useful primarily for                         // RopeFunctions.  We put the buffer                        // here to avoid locking in the                        // multithreaded case.    // The cached path is generally assumed to be valid    // only if the buffer is valid.    static void _S_setbuf(_Rope_iterator_base& __x);                                        // Set buffer contents given                                        // path cache.    static void _S_setcache(_Rope_iterator_base& __x);                                        // Set buffer contents and                                        // path cache.    static void _S_setcache_for_incr(_Rope_iterator_base& __x);                                        // As above, but assumes path                                        // cache is valid for previous posn.    
    _Rope_iterator_base() {}
    // Position an iterator over __root at index __pos with an invalid
    // buffer cache (_M_buf_ptr == 0); the cache fills on first use.
    _Rope_iterator_base(_RopeRep* __root, size_t __pos)
      : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
    void _M_incr(size_t __n);
    void _M_decr(size_t __n);
  public:
    size_t index() const { return _M_current_pos; }
    // Copy: if the source's buffer cache is valid, copy the whole object
    // (including the path cache); otherwise copy only position and root
    // and leave our cache invalid.
    _Rope_iterator_base(const _Rope_iterator_base& __x) {
        if (0 != __x._M_buf_ptr) {
            *this = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
    }
};

template<class _CharT, class _Alloc> class _Rope_iterator;

template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
      typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
      // The one from the base class may not be directly visible.
    _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
                   _Rope_iterator_base<_CharT,_Alloc>(
                     const_cast<_RopeRep*>(__root), __pos)
                   // Only nonconst iterators modify root ref count
    {}
  public:
    typedef _CharT reference;   // Really a value.  Returning a reference
                                // Would be a mess, since it would have
                                // to be included in refcount.
    typedef const _CharT* pointer;
  public:
    _Rope_const_iterator() {};
    _Rope_const_iterator(const _Rope_const_iterator& __x) :
                                _Rope_iterator_base<_CharT,_Alloc>(__x) { }
    _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
    _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
        _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
    // Same cache-aware copy policy as the base-class copy constructor.
    _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
        if (0 != __x._M_buf_ptr) {
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
        return(*this);
    }
    // Dereference: (re)build the buffer cache on demand, then read from it.
    reference operator*() {
        if (0 == _M_buf_ptr) _S_setcache(*this);
        return *_M_buf_ptr;
    }
    _Rope_const_iterator& operator++() {
        // Fast path: the next character is still inside the cached buffer.
        __GC_CONST _CharT* __next;
        if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
            _M_buf_ptr = __next;
            ++_M_current_pos;
        } else {
            _M_incr(1);
        }
        return *this;
    }
    _Rope_const_iterator& operator+=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_incr(__n);
        } else {
            _M_decr(-__n);
        }
        return *this;
    }
    _Rope_const_iterator& operator--() {
        _M_decr(1);
        return *this;
    }
    _Rope_const_iterator& operator-=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_decr(__n);
        } else {
            _M_incr(-__n);
        }
        return *this;
    }
    _Rope_const_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        // The returned iterator has no cache, so dereferencing it rebuilds
        // the path from the root.
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
        // This makes a subsequent dereference expensive.
        // Perhaps we should instead copy the iterator
        // if it has a valid cache?
    }
    _Rope_const_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        // As with postfix ++, the result carries no cache.
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
    }
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator-
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (ptrdiff_t __n,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __x);
    // Random access without moving the iterator: bypasses the buffer
    // cache and re-walks the tree via _S_fetch on every call.
    reference operator[](size_t __n) {
        return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
    }
    template<class _CharT2, class _Alloc2>
    friend bool operator==
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend bool operator< 
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend ptrdiff_t operator-
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
};

template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
    typedef typename _Rope_iterator_base<_CharT,_Alloc>::_RopeRep _RopeRep;
    rope<_CharT,_Alloc>* _M_root_rope;
        // root is treated as a cached version of this,
        // and is used to detect changes to the underlying
        // rope.
        // Root is included in the reference count.
        // This is necessary so that we can detect changes reliably.

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -