⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 rope

📁 mingw32.rar
💻
📖 第 1 页 / 共 5 页
字号:
};
// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes
// or ropes with a functional representation, that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
                             public char_producer<_CharT> {
  public:
    // XXX this whole class should be rewritten.
    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
    size_t _M_start;    // Offset of the substring within *_M_base.
    // char_producer interface: copy __req_len characters of the
    // substring, starting at __start_pos (relative to the substring),
    // into __buffer.  Dispatches on the tag of the base node and
    // shifts all positions by _M_start to index into the base rope.
    virtual void operator()(size_t __start_pos, size_t __req_len,
                            _CharT* __buffer) {
        switch(_M_base->_M_tag) {
            case _Rope_constants::_S_function:
            case _Rope_constants::_S_substringfn:
              {
                // Base is itself function-represented: forward the
                // request to its char_producer at the adjusted offset.
                char_producer<_CharT>* __fn =
                        ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
                (*__fn)(__start_pos + _M_start, __req_len, __buffer);
              }
              break;
            case _Rope_constants::_S_leaf:
              {
                // Base is a flat leaf: copy straight from its data.
                __GC_CONST _CharT* __s =
                        ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
                uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
                                     __buffer);
              }
              break;
            default:
              // Any other tag is silently ignored (nothing is copied);
              // presumably concat bases never reach here -- TODO confirm.
	      break;
        }
    }
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
        allocator_type;
    // Construct a substring node covering __l characters of *__b
    // starting at offset __s.  The RopeFunction base is pointed back
    // at *this* object's char_producer implementation (above).
    _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                          size_t __l, allocator_type __a)
      : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
        char_producer<_CharT>(),
        _M_base(__b),
        _M_start(__s)
    {
#       ifndef __GC
            // Reference-counted build: keep the base node alive.
            _M_base->_M_ref_nonnil();
#       endif
        this->_M_tag = _Rope_constants::_S_substringfn;
    }
    virtual ~_Rope_RopeSubstring() throw()
      {
#       ifndef __GC
          // Drop the reference taken in the constructor.
          _M_base->_M_unref_nonnil();
          // _M_free_c_string();  -- done by parent class
#       endif
      }
};


// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers.  Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
// is raised.  It is the caller's responsibility to
// adjust reference counts when these pointers are initialized
// or assigned to.  (This convention significantly reduces
// the number of potentially expensive reference count
// updates.)
#ifndef __GC
  template<class _CharT, class _Alloc>
  struct _Rope_self_destruct_ptr {
    _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
    // Unconditionally unref on destruction -- _S_unref must therefore
    // tolerate whatever value _M_ptr holds at that point.
    ~_Rope_self_destruct_ptr()
      { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#ifdef __EXCEPTIONS
        // With exceptions enabled, zero-initialize so that an early
        // throw never unrefs an indeterminate pointer.
        _Rope_self_destruct_ptr() : _M_ptr(0) {};
#else
        // Without exceptions, initialization is skipped; presumably
        // every path assigns before destruction -- TODO confirm.
        _Rope_self_destruct_ptr() {};
#endif
    _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
    _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
    _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
    operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
    // Plain pointer assignment: no refcount adjustment here -- the
    // caller manages counts (see the comment above the class).
    _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
        { _M_ptr = __x; return *this; }
  };
#endif

// Dereferencing a nonconst iterator has to return something
// that behaves almost like a reference.  It's not possible to
// return an actual reference since assignment requires extra
// work.  And we would get into the same problems as with the
// CD2 version of basic_string.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
    friend class rope<_CharT,_Alloc>;
    friend class _Rope_iterator<_CharT,_Alloc>;
    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
    // Under __GC a plain node pointer suffices; otherwise use the
    // self-destructing wrapper that unrefs on scope exit.
#   ifdef __GC
        typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
#   else
        typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
#   endif
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    typedef rope<_CharT,_Alloc> _My_rope;
    size_t _M_pos;              // Index of the referenced character.
    _CharT _M_current;          // Cached character value.
    bool _M_current_valid;      // True iff _M_current holds the value.
    _My_rope* _M_root;     // The whole rope.
  public:
    // Proxy for character __p of rope *__r; value not yet cached.
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
    :  _M_pos(__p), _M_current(), _M_current_valid(false), _M_root(__r) {}

    // Copy ctor deliberately marks the cache invalid -- see the
    // comment below about references outliving their expression.
    _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
    : _M_pos(__x._M_pos), _M_current(__x._M_current), _M_current_valid(false), 
      _M_root(__x._M_root) {}

  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
      : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
    inline operator _CharT () const;                 // read the character
    _Rope_char_ref_proxy& operator= (_CharT __c);    // write it
    _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
    // Proxy-to-proxy assignment copies the character *value*, not the
    // proxy identity.
    _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
        return operator=((_CharT)__c);
    }
};

// Swap the two rope characters denoted by a pair of reference
// proxies.  The proxies are taken by value on purpose: each copy
// still identifies the same (rope, position) pair, so assigning
// through the copies updates the underlying ropes.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
    // Read __a's character first, then overwrite in sequence.
    _CharT __saved = __a;
    __a = static_cast<_CharT>(__b);
    __b = __saved;
}

// Pointer-like proxy produced by taking the address of a
// _Rope_char_ref_proxy.  It stores a (rope, position) pair rather
// than a real address; dereferencing rebuilds a reference proxy.
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
    // XXX this class should be rewritten.
    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
    size_t _M_pos;                    // Index of the referenced character.
    rope<_CharT,_Alloc>* _M_root;     // The whole rope.
  public:
    _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    // Default-construct as a "null" proxy.  (Previously left both
    // members indeterminate.)
    _Rope_char_ptr_proxy() : _M_pos(0), _M_root(0) {}
    // The pointer argument is ignored and the proxy is made null;
    // presumably only a null _CharT* is meaningful here -- TODO
    // confirm against callers.  Initializers are listed in member
    // declaration order (_M_pos before _M_root); the original listed
    // them reversed, which triggers -Wreorder and misleads readers
    // about initialization order.
    _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    }
    _Rope_char_ptr_proxy&
    operator= (const _Rope_char_ptr_proxy& __x) {
        _M_pos = __x._M_pos;
        _M_root = __x._M_root;
        return *this;
    }
    template<class _CharT2, class _Alloc2>
    friend bool operator== (const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __x,
                            const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __y);
    // Dereference: recreate the reference proxy for this position.
    _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
    }
};


// Rope iterators:
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
// When we run out of cache, we have to reconstruct the iterator
// value.
// Pointers from iterators are not included in reference counts.
// Iterators are assumed to be thread private.  Ropes can
// be shared.

// Shared state and path-cache machinery for rope iterators; both the
// const and mutable iterator flavors derive from this.
template<class _CharT, class _Alloc>
class _Rope_iterator_base
  : public iterator<std::random_access_iterator_tag, _CharT>
{
    friend class rope<_CharT,_Alloc>;
  public:
    typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
        // Borland doesn't want this to be protected.
  protected:
    enum { _S_path_cache_len = 4 }; // Must be <= 9.
    enum { _S_iterator_buf_len = 15 };
    size_t _M_current_pos;
    _RopeRep* _M_root;     // The whole rope.
    size_t _M_leaf_pos;    // Starting position for current leaf
    __GC_CONST _CharT* _M_buf_start;
                        // Buffer possibly
                        // containing current char.
    __GC_CONST _CharT* _M_buf_ptr;
                        // Pointer to current char in buffer.
                        // != 0 ==> buffer valid.
    __GC_CONST _CharT* _M_buf_end;
                        // One past __last valid char in buffer.
    // What follows is the path cache.  We go out of our
    // way to make this compact.
    // Path_end contains the bottom section of the path from
    // the root to the current leaf.
    const _RopeRep* _M_path_end[_S_path_cache_len];
    int _M_leaf_index;     // Last valid __pos in path_end;
                        // _M_path_end[0] ... _M_path_end[leaf_index-1]
                        // point to concatenation nodes.
    unsigned char _M_path_directions;
                          // (path_directions >> __i) & 1 is 1
                          // iff we got from _M_path_end[leaf_index - __i - 1]
                          // to _M_path_end[leaf_index - __i] by going to the
                          // __right. Assumes path_cache_len <= 9.
    _CharT _M_tmp_buf[_S_iterator_buf_len];
                        // Short buffer for surrounding chars.
                        // This is useful primarily for
                        // RopeFunctions.  We put the buffer
                        // here to avoid locking in the
                        // multithreaded case.
    // The cached path is generally assumed to be valid
    // only if the buffer is valid.
    static void _S_setbuf(_Rope_iterator_base& __x);
                                        // Set buffer contents given
                                        // path cache.
    static void _S_setcache(_Rope_iterator_base& __x);
                                        // Set buffer contents and
                                        // path cache.
    static void _S_setcache_for_incr(_Rope_iterator_base& __x);
                                        // As above, but assumes path
                                        // cache is valid for previous posn.
    // Default ctor leaves every member uninitialized; derived classes
    // must assign before use.
    _Rope_iterator_base() {}
    // Start at position __pos in the tree rooted at __root.  The
    // buffer cache starts out invalid (_M_buf_ptr == 0).
    _Rope_iterator_base(_RopeRep* __root, size_t __pos)
      : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
    // Bulk movement helpers; defined out of line.
    void _M_incr(size_t __n);
    void _M_decr(size_t __n);
  public:
    size_t index() const { return _M_current_pos; }
    // Copy ctor: if the source's buffer cache is valid, copy the
    // whole object (path cache included) via default assignment;
    // otherwise copy only position and root and leave the new
    // iterator's cache invalid.
    _Rope_iterator_base(const _Rope_iterator_base& __x) {
        if (0 != __x._M_buf_ptr) {
            *this = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
    }
};

template<class _CharT, class _Alloc> class _Rope_iterator;

template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
      typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
      // The one from the base class may not be directly visible.
    // Construct from a raw tree node.  Casting away const is
    // acceptable here because, as noted below, const iterators never
    // touch the root's reference count.
    _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
                   _Rope_iterator_base<_CharT,_Alloc>(
                     const_cast<_RopeRep*>(__root), __pos)
                   // Only nonconst iterators modify root ref count
    {}
  public:
    typedef _CharT reference;   // Really a value.  Returning a reference
                                // Would be a mess, since it would have
                                // to be included in refcount.
    typedef const _CharT* pointer;

  public:
    // Default: base members stay uninitialized (base default ctor is
    // empty); must be assigned before use.
    _Rope_const_iterator() {};
    _Rope_const_iterator(const _Rope_const_iterator& __x) :
                                _Rope_iterator_base<_CharT,_Alloc>(__x) { }
    // Conversion from a mutable iterator; defined elsewhere.
    _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
    // Iterator at position __pos within rope __r.
    _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
        _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
    // Assignment mirrors the base-class copy ctor: if the source's
    // buffer cache is valid, copy the whole base subobject (path
    // cache included); otherwise copy only position and root and
    // invalidate this iterator's cache.
    _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
        if (0 != __x._M_buf_ptr) {
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            this->_M_current_pos = __x._M_current_pos;
            this->_M_root = __x._M_root;
            this->_M_buf_ptr = 0;
        }
        return(*this);
    }
    // Dereference: lazily rebuilds the leaf-buffer cache on first use,
    // then reads the cached character.  Non-const because it may
    // update the cache in place.
    reference operator*() {
        if (0 == this->_M_buf_ptr) _S_setcache(*this);
        return *this->_M_buf_ptr;
    }
    // Preincrement.  Fast path: if the buffer cache is valid and the
    // next character still lies inside it, just bump the pointer and
    // position.  Otherwise fall back on _M_incr, which rebuilds the
    // cache.
    _Rope_const_iterator& operator++() {
        __GC_CONST _CharT* __next;
        if (0 != this->_M_buf_ptr
	    && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
            this->_M_buf_ptr = __next;
            ++this->_M_current_pos;
        } else {
            this->_M_incr(1);
        }
        return *this;
    }
    // Advance by __n positions; __n may be negative.  Delegates to
    // the base-class bulk movers, which refresh the path cache.
    _Rope_const_iterator& operator+=(ptrdiff_t __n) {
        if (__n < 0)
            this->_M_decr(-__n);
        else
            this->_M_incr(__n);
        return *this;
    }
    // Predecrement: no fast path -- always goes through _M_decr.
    _Rope_const_iterator& operator--() {
        this->_M_decr(1);
        return *this;
    }
    // Retreat by __n positions; __n may be negative.  Symmetric with
    // operator+= above.
    _Rope_const_iterator& operator-=(ptrdiff_t __n) {
        if (__n < 0)
            this->_M_incr(-__n);
        else
            this->_M_decr(__n);
        return *this;
    }
    // Postincrement: returns a freshly constructed iterator at the
    // old position, whose cache starts out empty.
    _Rope_const_iterator operator++(int) {
        size_t __old_pos = this->_M_current_pos;
        this->_M_incr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
        // This makes a subsequent dereference expensive.
        // Perhaps we should instead copy the iterator
        // if it has a valid cache?
    }
    // Postdecrement: returns a freshly constructed iterator at the
    // old position (same empty-cache caveat as postincrement above).
    _Rope_const_iterator operator--(int) {
        size_t __old_pos = this->_M_current_pos;
        this->_M_decr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    }
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator-
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (ptrdiff_t __n,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __x);

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -