// stl_rope.h -- SGI STL rope implementation (excerpt; web-viewer header removed).
{
__stl_assert(__l > 0);
__stl_assert(__s + __l <= __b->_M_size._M_data);
# ifndef __GC
_M_base->_M_ref_nonnil();
# endif
_M_tag = _S_substringfn;
}
virtual ~_Rope_RopeSubstring()
{
# ifndef __GC
_M_base->_M_unref_nonnil();
// _M_free_c_string(); -- done by parent class
# endif
}
};
// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers. Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
// is raised. It is the caller's responsibility to
// adjust reference counts when these pointers are initialized
// or assigned to. (This convention significantly reduces
// the number of potentially expensive reference count
// updates.)
#ifndef __GC
// RAII guard for a _Rope_RopeRep*: its destructor unconditionally drops one
// reference on the held pointer, whether the scope is left normally or by
// exception.  It is NOT a conventional smart pointer -- construction and
// assignment do NOT adjust reference counts; that is the caller's job.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
_Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
// NOTE(review): relies on _S_unref tolerating a null pointer when the
// guard is destroyed unassigned -- confirm against _S_unref's definition.
~_Rope_self_destruct_ptr()
{ _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
# ifdef __STL_USE_EXCEPTIONS
// With exceptions enabled the destructor can run during unwinding before
// any assignment, so _M_ptr must start out null.
_Rope_self_destruct_ptr() : _M_ptr(0) {};
# else
// Without exceptions, initialization is skipped -- presumably a
// micro-optimization on the assumption the guard is always assigned
// before destruction.
_Rope_self_destruct_ptr() {};
# endif
_Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
// Pointer-like access to the held representation node.
_Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
_Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
// Plain pointer store: no unref of the old value, no ref of the new one.
_Rope_self_destruct_ptr<_CharT, _Alloc>&
operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
{ _M_ptr = __x; return *this; }
};
#endif
// Dereferencing a nonconst iterator has to return something
// that behaves almost like a reference. It's not possible to
// return an actual reference since assignment requires extra
// work. And we would get into the same problems as with the
// CD2 version of basic_string.
// Proxy returned when a nonconst rope iterator or rope::operator[] is
// dereferenced.  It behaves like a _CharT& but routes writes through the
// rope (see operator=, defined out of line).
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
friend class rope<_CharT,_Alloc>;
friend class _Rope_iterator<_CharT,_Alloc>;
friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
# ifdef __GC
typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
# else
typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
# endif
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
typedef rope<_CharT,_Alloc> _My_rope;
size_t _M_pos;          // index of the referenced character within the rope
_CharT _M_current;      // cached character value; meaningful only if _M_current_valid
bool _M_current_valid;  // true iff _M_current holds the character at _M_pos
_My_rope* _M_root; // The whole rope.
public:
// Reference to __r's character at position __p; value not yet cached.
_Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
_M_pos(__p), _M_current_valid(false), _M_root(__r) {}
// Copying deliberately discards the cache (_M_current_valid = false):
_Rope_char_ref_proxy(const _Self& __x) :
_M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
// Don't preserve cache if the reference can outlive the
// expression. We claim that's not possible without calling
// a copy constructor or generating reference to a proxy
// reference. We declare the latter to have undefined semantics.
// Reference whose value __c is already known -- cache it up front.
_Rope_char_ref_proxy(_My_rope* __r, size_t __p,
_CharT __c) :
_M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
inline operator _CharT () const;            // read the character (out of line)
_Self& operator= (_CharT __c);              // write through to the rope (out of line)
_Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;  // &proxy -> pointer proxy
// Proxy-to-proxy assignment copies the character value, not the identity.
_Self& operator= (const _Self& __c) {
return operator=((_CharT)__c);
}
};
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
// swap for proxy references: exchanges the two referenced CHARACTERS in
// their ropes (each assignment writes through the proxy), taking the
// proxies by value since they are references themselves.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
_Rope_char_ref_proxy <_CharT, __Alloc > __b) {
_CharT __tmp = __a;
__a = __b;
__b = __tmp;
}
#else
// There is no really acceptable way to handle this. The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes. We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
_Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
_CharT __tmp = __a; \
__a = __b; \
__b = __tmp; \
}
// Pre-instantiate the overwhelmingly common char/default-allocator case.
_ROPE_SWAP_SPECIALIZATION(char,__STL_DEFAULT_ALLOCATOR(char) )
#endif /* !__STL_FUNCTION_TMPL_PARTIAL_ORDER */
// Proxy standing in for a _CharT* into a rope: it records only the rope and
// a position, and operator* reconstitutes a character-reference proxy on
// demand.  Produced by _Rope_char_ref_proxy::operator&.
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
// XXX this class should be rewritten.
typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
size_t _M_pos;                 // index of the referenced character
rope<_CharT,_Alloc>* _M_root; // The whole rope.
public:
// Build from a character-reference proxy (the result of &rope[i]).
_Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
: _M_pos(__x._M_pos), _M_root(__x._M_root) {}
_Rope_char_ptr_proxy(const _Self& __x)
: _M_pos(__x._M_pos), _M_root(__x._M_root) {}
// Fix: default-construct to the well-defined null state (matching the
// _CharT* constructor below) instead of leaving _M_pos/_M_root
// uninitialized -- reading them before assignment was undefined behavior.
_Rope_char_ptr_proxy() : _M_pos(0), _M_root(0) {}
// Only a null _CharT* may convert to a (null) pointer proxy.
_Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
__stl_assert(0 == __x);
}
_Self&
operator= (const _Self& __x) {
_M_pos = __x._M_pos;
_M_root = __x._M_root;
return *this;
}
friend inline bool operator== __STL_NULL_TMPL_ARGS
(const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
// "Dereference": produce a reference proxy for rope position _M_pos.
_Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
}
};
// Rope iterators:
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
// When we run out of cache, we have to reconstruct the iterator
// value.
// Pointers from iterators are not included in reference counts.
// Iterators are assumed to be thread private. Ropes can
// be shared.
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma set woff 1375
#endif
// Common state and cache machinery shared by const and nonconst rope
// iterators: the current position, a pointer into a leaf buffer holding the
// current character, and a short cache of the tree path to that leaf.
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/* : public random_access_iterator<_CharT, ptrdiff_t> */
{
friend class rope<_CharT,_Alloc>;
typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
public:
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
// Borland doesn't want this to be protected.
// protected:
enum { _S_path_cache_len = 4 }; // Must be <= 9.
enum { _S_iterator_buf_len = 15 };
size_t _M_current_pos;   // absolute character index within the rope
_RopeRep* _M_root; // The whole rope.
size_t _M_leaf_pos; // Starting position for current leaf
__GC_CONST _CharT* _M_buf_start;
// Buffer possibly
// containing current char.
__GC_CONST _CharT* _M_buf_ptr;
// Pointer to current char in buffer.
// != 0 ==> buffer valid.
__GC_CONST _CharT* _M_buf_end;
// One past __last valid char in buffer.
// What follows is the path cache. We go out of our
// way to make this compact.
// Path_end contains the bottom section of the path from
// the root to the current leaf.
const _RopeRep* _M_path_end[_S_path_cache_len];
int _M_leaf_index; // Last valid __pos in path_end;
// _M_path_end[0] ... _M_path_end[leaf_index-1]
// point to concatenation nodes.
unsigned char _M_path_directions;
// (path_directions >> __i) & 1 is 1
// iff we got from _M_path_end[leaf_index - __i - 1]
// to _M_path_end[leaf_index - __i] by going to the
// __right. Assumes path_cache_len <= 9.
_CharT _M_tmp_buf[_S_iterator_buf_len];
// Short buffer for surrounding chars.
// This is useful primarily for
// RopeFunctions. We put the buffer
// here to avoid locking in the
// multithreaded case.
// The cached path is generally assumed to be valid
// only if the buffer is valid.
static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
// Set buffer contents given
// path cache.
static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
// Set buffer contents and
// path cache.
static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
// As above, but assumes path
// cache is valid for previous posn.
_Rope_iterator_base() {}
// Position an iterator with an empty cache (_M_buf_ptr == 0); the cache
// is filled lazily on first dereference.
_Rope_iterator_base(_RopeRep* __root, size_t __pos)
: _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
void _M_incr(size_t __n);   // advance __n positions (defined out of line)
void _M_decr(size_t __n);   // retreat __n positions (defined out of line)
public:
size_t index() const { return _M_current_pos; }
// Copy construction: if the source's buffer cache is valid, copy all the
// cache state via the (default, memberwise) assignment; otherwise copy
// only position and root, leaving this cache invalid.
_Rope_iterator_base(const _Self& __x) {
if (0 != __x._M_buf_ptr) {
*this = __x;
} else {
_M_current_pos = __x._M_current_pos;
_M_root = __x._M_root;
_M_buf_ptr = 0;
}
}
};
template<class _CharT, class _Alloc> class _Rope_iterator;
// Random-access const iterator over a rope.  Dereference returns a _CharT
// VALUE (not a reference); const iterators never touch the root's reference
// count.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
friend class rope<_CharT,_Alloc>;
typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
// protected:
public:
# ifdef __STL_HAS_NAMESPACES
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
// The one from the base class may not be directly visible.
# endif
// The const is cast away only to satisfy the base-class constructor:
_Rope_const_iterator(const _RopeRep* __root, size_t __pos):
_Rope_iterator_base<_CharT,_Alloc>(
__CONST_CAST(_RopeRep*,__root), __pos)
// Only nonconst iterators modify root ref count
{}
public:
typedef _CharT reference; // Really a value. Returning a reference
// Would be a mess, since it would have
// to be included in refcount.
typedef const _CharT* pointer;
typedef _CharT value_type;
typedef ptrdiff_t difference_type;
typedef random_access_iterator_tag iterator_category;
public:
_Rope_const_iterator() {};
_Rope_const_iterator(const _Self& __x) :
_Rope_iterator_base<_CharT,_Alloc>(__x) { }
// A nonconst iterator converts freely to a const one.
_Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
_Rope_iterator_base<_CharT,_Alloc>(__x) {}
_Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
_Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
// Assignment mirrors the base-class copy constructor: copy the cache
// only when the source's cache is valid.
_Self& operator= (const _Self& __x) {
if (0 != __x._M_buf_ptr) {
*(__STATIC_CAST(_Base*,this)) = __x;
} else {
_M_current_pos = __x._M_current_pos;
_M_root = __x._M_root;
_M_buf_ptr = 0;
}
return(*this);
}
// Dereference: fill the buffer cache lazily, then read the cached char.
reference operator*() {
if (0 == _M_buf_ptr) _S_setcache(*this);
return *_M_buf_ptr;
}
// Pre-increment: fast path while the next char is still inside the
// cached leaf buffer; otherwise fall back to the general _M_incr.
_Self& operator++() {
__GC_CONST _CharT* __next;
if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
_M_buf_ptr = __next;
++_M_current_pos;
} else {
_M_incr(1);
}
return *this;
}
_Self& operator+=(ptrdiff_t __n) {
if (__n >= 0) {
_M_incr(__n);
} else {
_M_decr(-__n);
}
return *this;
}
_Self& operator--() {
_M_decr(1);
return *this;
}
_Self& operator-=(ptrdiff_t __n) {
if (__n >= 0) {
_M_decr(__n);
} else {
_M_incr(-__n);
}
return *this;
}
// Post-increment returns a freshly built iterator (cache empty):
_Self operator++(int) {
size_t __old_pos = _M_current_pos;
_M_incr(1);
return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
// This makes a subsequent dereference expensive.
// Perhaps we should instead copy the iterator
// if it has a valid cache?
}
_Self operator--(int) {
size_t __old_pos = _M_current_pos;
_M_decr(1);
return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
}
inline reference operator[](size_t __n);  // defined out of line
};
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
friend class rope<_CharT,_Alloc>;
typedef _Rope_iterator<_CharT, _Alloc> _Self;
typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
// protected:
public:
rope<_CharT,_Alloc>* _M_root_rope;
// root is treated as a cached version of this,
// and is used to detect changes to the underlying
// rope.
// Root is included in the reference count.
// This is necessary so that we can detect changes reliably.
// Unfortunately, it requires careful bookkeeping for the
// nonGC case.
_Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
: _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr._M_data, __pos),
_M_root_rope(__r)
{ _RopeRep::_S_ref(_M_root); }
void _M_check();
public:
typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
typedef _CharT value_type;
typedef ptrdiff_t difference_type;
// ... (remainder of the _Rope_iterator definition and of this header is
// truncated here; web-viewer keyboard-shortcut footer removed)