
stl_rope.h

From a package developed with VC (Visual C++) that implements point cloud data processing, image scaling, and surface generation.
Page 1 of 5
};

#else /* !__STL_USE_STD_ALLOCATORS */

#define __STATIC_IF_SGI_ALLOC static

template <class _CharT, class _Alloc>
class _Rope_rep_base {
public:
  typedef _Alloc allocator_type;
  static allocator_type get_allocator() { return allocator_type(); }
  _Rope_rep_base(size_t __size, const allocator_type&)
    : _M_size(__size) {}
  size_t _M_size;

protected:
# define __ROPE_DEFINE_ALLOC(_T, __name) \
        typedef simple_alloc<_T, _Alloc> __name##Alloc; \
        static _T* __name##_allocate(size_t __n) \
                { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_T* __p, size_t __n) \
                { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Alloc);
# undef __ROPE_DEFINE_ALLOC
};

#endif /* __STL_USE_STD_ALLOCATORS */

template<class _CharT, class _Alloc>
struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc> {
    public:
    enum { _S_max_rope_depth = 45 };
    enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
    _Tag _M_tag:8;
    bool _M_is_balanced:8;
    unsigned char _M_depth;
    __GC_CONST _CharT* _M_c_string;
                        /* Flattened version of string, if needed.  */
                        /* typically 0.                             */
                        /* If it's not 0, then the memory is owned  */
                        /* by this node.                            */
                        /* In the case of a leaf, this may point to */
                        /* the same memory as the data field.       */
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
                  allocator_type __a)
        : _M_tag(__t), _M_depth(__d), _M_is_balanced(__b), _M_c_string(0),
          _Rope_rep_base<_CharT,_Alloc>(__size, __a)
    {
#       ifndef __GC
            _M_refcount = 1;
            _M_init_refcount_lock();
#       endif
    }

#   ifndef __GC
#       if defined(__STL_WIN32THREADS)
            long _M_refcount;   // InterlockedIncrement wants a long *
#       else
            size_t _M_refcount;
#       endif
        // We count references from rope instances
        // and references from other rope nodes.  We
        // do not count const_iterator references.
        // Iterator references are counted so that rope modifications
        // can be detected after the fact.
        // Generally function results are counted, i.e.
        // a pointer returned by a function is included at the
        // point at which the pointer is returned.
        // The recipient should decrement the count if the
        // result is not needed.
        // Generally function arguments are not reflected
        // in the reference count.  The callee should increment
        // the count before saving the argument someplace that
        // will outlive the call.
#   endif

#   ifndef __GC
#       ifdef __STL_SGI_THREADS
            // Reference counting with multiple threads and no
            // hardware or thread package support is pretty awful.
            // Mutexes are normally too expensive.
            // We'll assume a COMPARE_AND_SWAP(destp, __old, new)
            // operation, which might be cheaper.
#           if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
#               define __add_and_fetch(l,v) add_then_test((unsigned long*)l,v)
#           endif
            void _M_init_refcount_lock() {}
            void _M_incr_refcount ()
            {
                __add_and_fetch(&_M_refcount, 1);
            }
            size_t _M_decr_refcount ()
            {
                return __add_and_fetch(&_M_refcount, (size_t)(-1));
            }
#       elif defined(__STL_WIN32THREADS)
            void _M_init_refcount_lock() {}
            void _M_incr_refcount ()
            {
                InterlockedIncrement(&_M_refcount);
            }
            size_t _M_decr_refcount ()
            {
                return InterlockedDecrement(&_M_refcount);
            }
#       elif defined(_PTHREADS)
            // This should be portable, but performance is expected
            // to be quite awful.  This really needs platform specific
            // code.
            pthread_mutex_t _M_refcount_lock;
            void _M_init_refcount_lock() {
                pthread_mutex_init(&_M_refcount_lock, 0);
            }
            void _M_incr_refcount ()
            {
                pthread_mutex_lock(&_M_refcount_lock);
                ++_M_refcount;
                pthread_mutex_unlock(&_M_refcount_lock);
            }
            size_t _M_decr_refcount ()
            {
                size_t __result;
                pthread_mutex_lock(&_M_refcount_lock);
                __result = --_M_refcount;
                pthread_mutex_unlock(&_M_refcount_lock);
                return __result;
            }
#       else
            void _M_init_refcount_lock() {}
            void _M_incr_refcount ()
            {
                ++_M_refcount;
            }
            size_t _M_decr_refcount ()
            {
                --_M_refcount;
                return _M_refcount;
            }
#       endif
#   else
        void _M_incr_refcount () {}
#   endif

#   ifdef __STL_USE_STD_ALLOCATORS
        static void _S_free_string(__GC_CONST _CharT*, size_t __len,
                                   allocator_type __a);
#       define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
#   else
        static void _S_free_string(__GC_CONST _CharT*, size_t __len);
#       define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l);
#   endif
                        // Deallocate data section of a leaf.
                        // This shouldn't be a member function.
                        // But it's hard to do anything else at the
                        // moment, because it's templatized w.r.t.
                        // an allocator.
                        // Does nothing if __GC is defined.
#   ifndef __GC
          void _M_free_c_string();
          void _M_free_tree();
                        // Deallocate t. Assumes t is not 0.
          void _M_unref_nonnil()
          {
              if (0 == _M_decr_refcount()) _M_free_tree();
          }
          void _M_ref_nonnil()
          {
              _M_incr_refcount();
          }
          static void _S_unref(_Rope_RopeRep* __t)
          {
              if (0 != __t) {
                  __t->_M_unref_nonnil();
              }
          }
          static void _S_ref(_Rope_RopeRep* __t)
          {
              if (0 != __t) __t->_M_incr_refcount();
          }
          static void _S_free_if_unref(_Rope_RopeRep* __t)
          {
              if (0 != __t && 0 == __t->_M_refcount) __t->_M_free_tree();
          }
#   else /* __GC */
          void _M_unref_nonnil() {}
          void _M_ref_nonnil() {}
          static void _S_unref(_Rope_RopeRep*) {}
          static void _S_ref(_Rope_RopeRep*) {}
          static void _S_free_if_unref(_Rope_RopeRep*) {}
#   endif
};

template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    // Apparently needed by VC++
    // The data fields of leaves are allocated with some
    // extra space, to accommodate future growth and for basic
    // character types, to hold a trailing eos character.
    enum { _S_alloc_granularity = 8 };
    static size_t _S_rounded_up_size(size_t __n) {
        size_t __size_with_eos;
        if (_S_is_basic_char_type((_CharT*)0)) {
            __size_with_eos = __n + 1;
        } else {
            __size_with_eos = __n;
        }
#       ifdef __GC
           return __size_with_eos;
#       else
           // Allow slop for in-place expansion.
           return (__size_with_eos + _S_alloc_granularity-1)
                        &~ (_S_alloc_granularity-1);
#       endif
    }
    __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated.    */
                                /* The allocated size is            */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it      */
                                /* doesn't matter.                  */
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
        : _M_data(__d)
        , _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a)
    {
        __stl_assert(__size > 0);
        if (_S_is_basic_char_type((_CharT *)0)) {
            // already eos terminated.
            _M_c_string = __d;
        }
    }
        // The constructor assumes that d has been allocated with
        // the proper allocator and the properly padded size.
        // In contrast, the destructor deallocates the data:
# ifndef __GC
    ~_Rope_RopeLeaf() {
        if (_M_data != _M_c_string) {
            _M_free_c_string();
        }
        __STL_FREE_STRING(_M_data, _M_size, get_allocator());
    }
# endif
};

template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    _Rope_RopeRep<_CharT,_Alloc>* _M_left;
    _Rope_RopeRep<_CharT,_Alloc>* _M_right;
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
                            _Rope_RopeRep<_CharT,_Alloc>* __r,
                            allocator_type __a)
      : _M_left(__l), _M_right(__r)
      , _Rope_RopeRep<_CharT,_Alloc>(
          _S_concat, max(__l->_M_depth, __r->_M_depth) + 1, false,
          __l->_M_size + __r->_M_size, __a)
    {}
# ifndef __GC
    ~_Rope_RopeConcatenation() {
        _M_free_c_string();
        _M_left->_M_unref_nonnil();
        _M_right->_M_unref_nonnil();
    }
# endif
};

template<class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    char_producer<_CharT>* _M_fn;
#   ifndef __GC
      bool _M_delete_when_done; // Char_producer is owned by the
                                // rope and should be explicitly
                                // deleted when the rope becomes
                                // inaccessible.
#   else
      // In the GC case, we either register the rope for
      // finalization, or not.  Thus the field is unnecessary;
      // the information is stored in the collector data structures.
      // We do need a finalization procedure to be invoked by the
      // collector.
      static void _S_fn_finalization_proc(void * __tree, void *) {
          delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
      }
#   endif
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
                       bool __d, allocator_type __a)
      : _M_fn(__f)
#       ifndef __GC
      , _M_delete_when_done(__d)
#       endif
      , _Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a)
    {
        __stl_assert(__size > 0);
#       ifdef __GC
            if (__d) {
                GC_REGISTER_FINALIZER(
                  this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
            }
#       endif
    }
# ifndef __GC
    ~_Rope_RopeFunction() {
        _M_free_c_string();
        if (_M_delete_when_done) {
            delete _M_fn;
        }
    }
# endif
};

// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes
// or ropes with a functional representation, that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
                             public char_producer<_CharT> {
  public:
    // XXX this whole class should be rewritten.
    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
    size_t _M_start;
    virtual void operator()(size_t __start_pos, size_t __req_len,
                            _CharT* __buffer) {
        switch (_M_base->_M_tag) {
            case _S_function:
            case _S_substringfn:
              {
                char_producer<_CharT>* __fn =
                        ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
                __stl_assert(__start_pos + __req_len <= _M_size);
                __stl_assert(_M_start + _M_size <= _M_base->_M_size);
                (*__fn)(__start_pos + _M_start, __req_len, __buffer);
              }
              break;
            case _S_leaf:
              {
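The node types above (_Rope_RopeLeaf, _Rope_RopeConcatenation, _Rope_RopeFunction, _Rope_RopeSubstring) are what the public rope class builds its trees from. A minimal usage sketch follows, assuming a toolchain that still ships the SGI extension as <ext/rope> in namespace __gnu_cxx (as libstdc++ does); the sketch is caller-side illustration, not part of this header.

#include <ext/rope>
#include <cstddef>
#include <iostream>

// A producer that computes characters on demand; a rope built from
// one is represented by a _Rope_RopeFunction node like the one above.
struct Digits : __gnu_cxx::char_producer<char> {
    void operator()(size_t pos, size_t len, char* buf) {
        for (size_t i = 0; i != len; ++i)
            buf[i] = char('0' + (pos + i) % 10);
    }
};

int main() {
    using __gnu_cxx::crope;             // crope = rope<char>

    crope a("point cloud ");            // stored as a _Rope_RopeLeaf
    crope b("surface data");
    crope c = a + b;                    // one _Rope_RopeConcatenation node;
                                        // both leaves are shared, not copied
    crope lazy(new Digits, 20, true);   // _Rope_RopeFunction node; 'true'
                                        // sets _M_delete_when_done
    std::cout << c.c_str() << '\n'      // c_str() may flatten into _M_c_string
              << lazy.substr(5, 5).c_str() << '\n';
    return 0;
}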

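The four conditional _M_incr_refcount/_M_decr_refcount variants in the listing (SGI __add_and_fetch, Win32 Interlocked*, pthread mutex, single-threaded fallback) all implement one contract: increment atomically, and on decrement return the new count so the caller can free the tree when it reaches zero. As a point of comparison only, here is a sketch of that contract with C++11 atomics; the RefCounted name is hypothetical and not part of this header.

#include <atomic>
#include <cstddef>

struct RefCounted {
    std::atomic<std::size_t> refcount{1};   // a new node starts with one
                                            // owner, like _M_refcount = 1

    void incr_refcount() {
        // Equivalent of __add_and_fetch(&_M_refcount, 1) or
        // InterlockedIncrement; the old value is not needed here.
        refcount.fetch_add(1, std::memory_order_relaxed);
    }

    std::size_t decr_refcount() {
        // fetch_sub returns the previous value, so subtract 1 to get
        // the post-decrement count that _M_decr_refcount reports.
        // acq_rel ordering makes the final release synchronize with
        // the tree deallocation that follows a zero result.
        return refcount.fetch_sub(1, std::memory_order_acq_rel) - 1;
    }
};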