rope

    // Constructor
    __gthread_mutex_t _M_ref_count_lock;

    _Refcount_Base(_RC_t __n)
    : _M_ref_count(__n), _M_ref_count_lock()
    {
#ifdef __GTHREAD_MUTEX_INIT
      __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
      _M_ref_count_lock = __tmp;
#elif defined(__GTHREAD_MUTEX_INIT_FUNCTION)
      __GTHREAD_MUTEX_INIT_FUNCTION (&_M_ref_count_lock);
#else
#error __GTHREAD_MUTEX_INIT or __GTHREAD_MUTEX_INIT_FUNCTION should be defined by gthr.h abstraction layer, report problem to libstdc++@gcc.gnu.org.
#endif
    }

    void
    _M_incr()
    {
      __gthread_mutex_lock(&_M_ref_count_lock);
      ++_M_ref_count;
      __gthread_mutex_unlock(&_M_ref_count_lock);
    }

    _RC_t
    _M_decr()
    {
      __gthread_mutex_lock(&_M_ref_count_lock);
      volatile _RC_t __tmp = --_M_ref_count;
      __gthread_mutex_unlock(&_M_ref_count_lock);
      return __tmp;
    }
  };

//
// What follows should really be local to rope.  Unfortunately,
// that doesn't work, since it makes it impossible to define generic
// equality on rope iterators.  According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
//

//
// The internal data structure for representing a rope.  This is
// private to the implementation.  A rope is really just a pointer
// to one of these.
//
// A few basic functions for manipulating this data structure
// are members of _RopeRep.  Most of the more complex algorithms
// are implemented as rope members.
//
// Some of the static member functions of _RopeRep have identically
// named functions in rope that simply invoke the _RopeRep versions.
//

#define __ROPE_DEFINE_ALLOCS(__a) \
        __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
        typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
        __ROPE_DEFINE_ALLOC(__C,_C) \
        typedef _Rope_RopeLeaf<_CharT,__a> __L; \
        __ROPE_DEFINE_ALLOC(__L,_L) \
        typedef _Rope_RopeFunction<_CharT,__a> __F; \
        __ROPE_DEFINE_ALLOC(__F,_F) \
        typedef _Rope_RopeSubstring<_CharT,__a> __S; \
        __ROPE_DEFINE_ALLOC(__S,_S)
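// [Editor's sketch -- illustration only, not part of the original header.]
// Because _M_decr() above returns the post-decrement count computed while
// still holding the lock, a caller can drop its reference and detect "that
// was the last reference" in one race-free step.  The hypothetical helper
// below shows the intended calling pattern:

inline bool
__refcount_release_demo(_Refcount_Base& __obj)
{
  // True for exactly one thread: the one that performed the final
  // decrement and must therefore reclaim the object.
  return 0 == __obj._M_decr();
}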
//  Internal rope nodes potentially store a copy of the allocator
//  instance used to allocate them.  This is mostly redundant.
//  But the alternative would be to pass allocator instances around
//  in some form to nearly all internal functions, since any pointer
//  assignment may result in a zero reference count and thus require
//  deallocation.

#define __STATIC_IF_SGI_ALLOC  /* not static */

template <class _CharT, class _Alloc>
struct _Rope_rep_base
: public _Alloc
{
  typedef _Alloc allocator_type;

  allocator_type
  get_allocator() const
  { return *static_cast<const _Alloc*>(this); }

  _Rope_rep_base(size_t __size, const allocator_type&)
  : _M_size(__size) {}

  size_t _M_size;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc::template rebind<_Tp>::other __name##Alloc; \
        static _Tp* __name##_allocate(size_t __n) \
          { return __name##Alloc().allocate(__n); } \
        static void __name##_deallocate(_Tp *__p, size_t __n) \
          { __name##Alloc().deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Alloc)
# undef __ROPE_DEFINE_ALLOC
};

namespace _Rope_constants
{
  enum { _S_max_rope_depth = 45 };
  enum _Tag { _S_leaf, _S_concat, _S_substringfn, _S_function };
}

template<class _CharT, class _Alloc>
struct _Rope_RopeRep
: public _Rope_rep_base<_CharT,_Alloc>
# ifndef __GC
  , _Refcount_Base
# endif
{
  public:
    _Rope_constants::_Tag _M_tag:8;
    bool _M_is_balanced:8;
    unsigned char _M_depth;
    __GC_CONST _CharT* _M_c_string;
    __gthread_mutex_t _M_c_string_lock;
                        /* Flattened version of string, if needed.  */
                        /* typically 0.                             */
                        /* If it's not 0, then the memory is owned  */
                        /* by this node.                            */
                        /* In the case of a leaf, this may point to */
                        /* the same memory as the data field.       */

    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;

    using _Rope_rep_base<_CharT,_Alloc>::get_allocator;

    _Rope_RopeRep(_Rope_constants::_Tag __t, int __d, bool __b, size_t __size,
                  allocator_type __a)
    : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
#     ifndef __GC
      _Refcount_Base(1),
#     endif
      _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0)
#ifdef __GTHREAD_MUTEX_INIT
    {
      // Do not copy a POSIX/gthr mutex once in use.  However, bits are bits.
      __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
      _M_c_string_lock = __tmp;
    }
#else
    { __GTHREAD_MUTEX_INIT_FUNCTION (&_M_c_string_lock); }
#endif

#   ifdef __GC
    void _M_incr() {}
#   endif

    static void _S_free_string(__GC_CONST _CharT*, size_t __len,
                               allocator_type __a);
#   define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
                        // Deallocate data section of a leaf.
                        // This shouldn't be a member function.
                        // But it's hard to do anything else at the
                        // moment, because it's templatized w.r.t.
                        // an allocator.
                        // Does nothing if __GC is defined.

#   ifndef __GC
    void _M_free_c_string();
    void _M_free_tree();
                        // Deallocate t. Assumes t is not 0.
    void _M_unref_nonnil()
    {
      if (0 == _M_decr())
        _M_free_tree();
    }

    void _M_ref_nonnil()
    { _M_incr(); }

    static void _S_unref(_Rope_RopeRep* __t)
    {
      if (0 != __t)
        __t->_M_unref_nonnil();
    }

    static void _S_ref(_Rope_RopeRep* __t)
    {
      if (0 != __t)
        __t->_M_incr();
    }

    static void _S_free_if_unref(_Rope_RopeRep* __t)
    {
      if (0 != __t && 0 == __t->_M_ref_count)
        __t->_M_free_tree();
    }
#   else /* __GC */
    void _M_unref_nonnil() {}
    void _M_ref_nonnil() {}
    static void _S_unref(_Rope_RopeRep*) {}
    static void _S_ref(_Rope_RopeRep*) {}
    static void _S_free_if_unref(_Rope_RopeRep*) {}
#   endif

  protected:
    _Rope_RopeRep&
    operator=(const _Rope_RopeRep&);

    _Rope_RopeRep(const _Rope_RopeRep&);
};
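// [Editor's sketch -- illustration only, not part of the original header.]
// The _S_ref/_S_unref helpers exist because, as noted above, any pointer
// assignment may drop a reference count to zero and force deallocation.
// A hypothetical assignment helper showing the ref-before-unref discipline
// (an ordering that stays correct even when __dest and __src already point
// at the same tree):

template<class _CharT, class _Alloc>
inline void
__rope_assign_demo(_Rope_RopeRep<_CharT,_Alloc>*& __dest,
                   _Rope_RopeRep<_CharT,_Alloc>* __src)
{
  _Rope_RopeRep<_CharT,_Alloc>::_S_ref(__src);    // count the new referent first
  _Rope_RopeRep<_CharT,_Alloc>::_S_unref(__dest); // may free the old tree
  __dest = __src;
}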
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf
: public _Rope_RopeRep<_CharT,_Alloc>
{
  public:
    // Apparently needed by VC++
    // The data fields of leaves are allocated with some
    // extra space, to accommodate future growth and for basic
    // character types, to hold a trailing eos character.
    enum { _S_alloc_granularity = 8 };

    static size_t
    _S_rounded_up_size(size_t __n)
    {
      size_t __size_with_eos;

      if (_S_is_basic_char_type((_CharT*)0))
        __size_with_eos = __n + 1;
      else
        __size_with_eos = __n;
#     ifdef __GC
      return __size_with_eos;
#     else
      // Allow slop for in-place expansion.
      return (__size_with_eos + _S_alloc_granularity - 1)
              &~ (_S_alloc_granularity - 1);
#     endif
    }

    __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated.    */
                                /* The allocated size is            */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it      */
                                /* doesn't matter.                  */

    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;

    _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_leaf, 0, true,
                                   __size, __a),
      _M_data(__d)
    {
      if (_S_is_basic_char_type((_CharT*)0))
        {
          // already eos terminated.
          this->_M_c_string = __d;
        }
    }

    // The constructor assumes that d has been allocated with
    // the proper allocator and the properly padded size.
    // In contrast, the destructor deallocates the data:
#   ifndef __GC
    ~_Rope_RopeLeaf() throw()
    {
      if (_M_data != this->_M_c_string)
        this->_M_free_c_string();
      __STL_FREE_STRING(_M_data, this->_M_size, this->get_allocator());
    }
#   endif

  protected:
    _Rope_RopeLeaf&
    operator=(const _Rope_RopeLeaf&);

    _Rope_RopeLeaf(const _Rope_RopeLeaf&);
};
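// [Editor's note -- worked example, not part of the original header.]
// For a basic character type and __n == 13: __size_with_eos == 14 (one
// extra slot for the trailing eos), and rounding up to the granularity
// of 8 gives (14 + 7) & ~7 == 16, i.e. two characters of slop available
// for in-place expansion.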
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation
: public _Rope_RopeRep<_CharT,_Alloc>
{
  public:
    _Rope_RopeRep<_CharT,_Alloc>* _M_left;
    _Rope_RopeRep<_CharT,_Alloc>* _M_right;

    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;

    _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
                            _Rope_RopeRep<_CharT,_Alloc>* __r,
                            allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_concat,
                                   std::max(__l->_M_depth,
                                            __r->_M_depth) + 1,
                                   false,
                                   __l->_M_size + __r->_M_size, __a),
      _M_left(__l), _M_right(__r)
    {}

#   ifndef __GC
    ~_Rope_RopeConcatenation() throw()
    {
      this->_M_free_c_string();
      _M_left->_M_unref_nonnil();
      _M_right->_M_unref_nonnil();
    }
#   endif

  protected:
    _Rope_RopeConcatenation&
    operator=(const _Rope_RopeConcatenation&);

    _Rope_RopeConcatenation(const _Rope_RopeConcatenation&);
};

template<class _CharT, class _Alloc>
struct _Rope_RopeFunction
: public _Rope_RopeRep<_CharT,_Alloc>
{
  public:
    char_producer<_CharT>* _M_fn;
#   ifndef __GC
    bool _M_delete_when_done; // Char_producer is owned by the
                              // rope and should be explicitly
                              // deleted when the rope becomes
                              // inaccessible.
#   else
    // In the GC case, we either register the rope for
    // finalization, or not.  Thus the field is unnecessary;
    // the information is stored in the collector data structures.
    // We do need a finalization procedure to be invoked by the
    // collector.
    static void
    _S_fn_finalization_proc(void* __tree, void*)
    { delete ((_Rope_RopeFunction*)__tree)->_M_fn; }
#   endif

    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;

    _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
                       bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_Rope_constants::_S_function,
                                   0, true, __size, __a),
      _M_fn(__f)
#     ifndef __GC
      , _M_delete_when_done(__d)
#     endif
    {
#     ifdef __GC
      if (__d)
        GC_REGISTER_FINALIZER(this,
                              _Rope_RopeFunction::_S_fn_finalization_proc,
                              0, 0, 0);
#     endif
    }

#   ifndef __GC
    ~_Rope_RopeFunction() throw()
    {
      this->_M_free_c_string();
      if (_M_delete_when_done)
        delete _M_fn;
    }
#   endif

  protected:
    _Rope_RopeFunction&
    operator=(const _Rope_RopeFunction&);

    _Rope_RopeFunction(const _Rope_RopeFunction&);
};
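// [Editor's sketch -- illustration only, not part of the original header.]
// A char_producer generates characters on demand, so a function rope
// stores only the producer and a length rather than flat character data.
// A minimal hypothetical producer that yields __len copies of one
// character (assuming char_producer's usual SGI operator() signature):

template<class _CharT>
struct __fill_producer_demo
: public char_producer<_CharT>
{
  _CharT _M_c;

  explicit
  __fill_producer_demo(_CharT __c) : _M_c(__c) {}

  virtual void
  operator()(size_t, size_t __len, _CharT* __buffer)
  {
    for (size_t __i = 0; __i < __len; ++__i)
      __buffer[__i] = _M_c;  // every requested position gets the same char
  }
};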
// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes,
// or ropes with a functional representation, that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.

template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring
: public _Rope_RopeFunction<_CharT,_Alloc>,
  public char_producer<_CharT>
{
  public:
    // XXX this whole class should be rewritten.
    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
    size_t _M_start;

    virtual void
    operator()(size_t __start_pos, size_t __req_len, _CharT* __buffer)
    {
      switch (_M_base->_M_tag)
        {
        case _Rope_constants::_S_function:
        case _Rope_constants::_S_substringfn:
