_rope.h
typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;
#if 0
/* Please explain why this code is necessary if you uncomment it.
 * The problem with it is that the rope implementation expects _S_rounded_up_size(n)
 * to return a size > n in order to store the terminating null character. When the
 * instantiation type is not char or wchar_t this is not guaranteed, resulting in a
 * memory overrun.
 */
static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
// Allow slop for in-place expansion.
return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
}
static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
// Allow slop for in-place expansion.
return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
}
#endif
// fbp : moved from RopeLeaf
static size_t _S_rounded_up_size(size_t __n)
//{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
{ return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
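// Worked example (illustrative; assumes _S_alloc_granularity == 8):
//   (__n + 8)     & ~7 : __n == 5 -> 8,  __n == 8 -> 16  (always > __n, so there
//                        is room for a terminating null character)
//   (__n + 8 - 1) & ~7 : __n == 5 -> 8,  __n == 8 -> 8   (can equal __n, leaving
//                        no room for the null, as the note above warns)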
static void _S_free_string( _CharT* __s, size_t __len,
allocator_type __a) {
_STLP_STD::_Destroy_Range(__s, __s + __len);
// This has to be a static member, so this gets a bit messy
# ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
__a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
# else
__stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
# endif
}
// Deallocate data section of a leaf.
// This shouldn't be a member function.
// But it's hard to do anything else at the
// moment, because it's templatized w.r.t.
// an allocator.
// Does nothing if __GC is defined.
void _M_free_c_string();
void _M_free_tree();
// Deallocate t. Assumes t is not 0.
void _M_unref_nonnil() {
if (_M_decr() == 0) _M_free_tree();
}
void _M_ref_nonnil() {
_M_incr();
}
static void _S_unref(_Self* __t) {
if (0 != __t) {
__t->_M_unref_nonnil();
}
}
static void _S_ref(_Self* __t) {
if (0 != __t) __t->_M_incr();
}
//static void _S_free_if_unref(_Self* __t) {
// if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
//}
};
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
_CharT* _M_data; /* Not necessarily 0 terminated. */
                 /* The allocated size is _S_rounded_up_size(size), */
                 /* except in the GC case, in which it doesn't matter. */
private:
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
void _M_init(__true_type const& /*_IsBasicCharType*/) {
this->_M_c_string = _M_data;
}
void _M_init(__false_type const& /*_IsBasicCharType*/) {}
public:
_STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
typedef typename _RopeRep::allocator_type allocator_type;
_Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
: _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
_M_data(__d) {
_STLP_ASSERT(_p_size > 0)
_M_init(_IsBasicCharType());
}
# ifdef _STLP_NO_ARROW_OPERATOR
_Rope_RopeLeaf() {}
_Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif
// The constructor assumes that d has been allocated with
// the proper allocator and the properly padded size.
// In contrast, the destructor deallocates the data:
~_Rope_RopeLeaf() {
if (_M_data != this->_M_c_string) {
this->_M_free_c_string();
}
_RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
}
};
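/* Construction sketch (illustrative only; the variable names here are hypothetical):
 * the buffer passed to the constructor must come from the leaf's allocator with
 * _S_rounded_up_size(__n) elements, because ~_Rope_RopeLeaf() hands exactly that
 * size back to _S_free_string().
 *
 *   size_t __padded = _RopeRep::_S_rounded_up_size(__n);  // strictly greater than __n
 *   _CharT* __buf   = __char_alloc.allocate(__padded);    // __char_alloc: allocator rebound to _CharT
 *   // ... construct __n characters into __buf ...
 *   new _Rope_RopeLeaf<_CharT, _Alloc>(__buf, __n, __a);
 */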
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
_RopeRep* _M_left;
_RopeRep* _M_right;
_STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
typedef typename _RopeRep::allocator_type allocator_type;
_Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
: _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
(max)(__l->_M_depth, __r->_M_depth) + 1, false,
__l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
{}
# ifdef _STLP_NO_ARROW_OPERATOR
_Rope_RopeConcatenation() {}
_Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif
~_Rope_RopeConcatenation() {
this->_M_free_c_string();
_M_left->_M_unref_nonnil();
_M_right->_M_unref_nonnil();
}
};
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
char_producer<_CharT>* _M_fn;
/*
* Char_producer is owned by the
* rope and should be explicitly
* deleted when the rope becomes
* inaccessible.
*/
bool _M_delete_when_done;
_STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
_Rope_RopeFunction() {}
_Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif
_Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
bool __d, allocator_type __a)
: _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
, _M_delete_when_done(__d)
{ _STLP_ASSERT(_p_size > 0) }
~_Rope_RopeFunction() {
this->_M_free_c_string();
if (_M_delete_when_done) {
delete _M_fn;
}
}
};
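/* Usage sketch (illustrative; _FillProducer is a hypothetical type, not part of
 * this header): a char_producer handed to this node with __d == true is owned by
 * the node and deleted in ~_Rope_RopeFunction().
 *
 *   struct _FillProducer : public char_producer<char> {
 *     char _M_c;
 *     _FillProducer(char __c) : _M_c(__c) {}
 *     virtual void operator()(size_t, size_t __len, char* __buf)
 *     { for (size_t __i = 0; __i < __len; ++__i) __buf[__i] = _M_c; }
 *   };
 *   // new _Rope_RopeFunction<char, _Alloc>(new _FillProducer('x'), 100, true, __a)
 *   // yields a function node that produces 100 'x' characters on demand and
 *   // deletes the producer when the node is destroyed.
 */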
/*
 * Substring results are usually represented using just
 * concatenation nodes. But in the case of very long flat ropes,
 * or ropes with a functional representation, that isn't practical.
 * In that case, we represent the result as a special case of
 * RopeFunction, whose char_producer points back to the rope itself.
 * In all cases except repeated substring operations and
 * deallocation, we treat the result as a RopeFunction.
 */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
// XXX this whole class should be rewritten.
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
_RopeRep *_M_base; // not 0
size_t _M_start;
/* virtual */ void operator()(size_t __start_pos, size_t __req_len,
_CharT* __buffer) {
typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
switch (_M_base->_M_tag) {
case _RopeRep::_S_function:
case _RopeRep::_S_substringfn:
{
char_producer<_CharT>* __fn =
__STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
_STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
_STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
(*__fn)(__start_pos + _M_start, __req_len, __buffer);
}
break;
case _RopeRep::_S_leaf:
{
_CharT* __s =
__STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
_STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
}
break;
default:
_STLP_ASSERT(false)
;
}
}
_STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
typedef typename _RopeRep::allocator_type allocator_type;
_Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
: _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
_M_base(__b), _M_start(__s) {
_STLP_ASSERT(__l > 0)
_STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
_M_base->_M_ref_nonnil();
this->_M_tag = _RopeRep::_S_substringfn;
}
virtual ~_Rope_RopeSubstring()
{ _M_base->_M_unref_nonnil(); }
};
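/* Position mapping example (illustrative): for a substring node with
 * _M_start == 10 and size 5 built over a leaf, operator()(2, 3, __buf)
 * copies the base characters at positions 12, 13 and 14
 * (__start_pos + _M_start onwards) into __buf.
 */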
/*
* Self-destructing pointers to Rope_rep.
* These are not conventional smart pointers. Their
* only purpose in life is to ensure that unref is called
* on the pointer either at normal exit or if an exception
* is raised. It is the caller's responsibility to
* adjust reference counts when these pointers are initialized
* or assigned to. (This convention significantly reduces
* the number of potentially expensive reference count
* updates.)
*/
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
_Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
~_Rope_self_destruct_ptr()
{ _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
# ifdef _STLP_USE_EXCEPTIONS
_Rope_self_destruct_ptr() : _M_ptr(0) {}
# else
_Rope_self_destruct_ptr() {}
# endif
_Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
_Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
_Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
_Rope_self_destruct_ptr<_CharT, _Alloc>&
operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
{ _M_ptr = __x; return *this; }
};
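/* Usage sketch (illustrative; __some_rep is a hypothetical local): typical use is
 * to park an already-referenced tree so it is unreferenced even if an exception
 * escapes:
 *
 *   _Rope_self_destruct_ptr<_CharT, _Alloc> __guard(__some_rep); // count already adjusted by caller
 *   // ... operations that may throw ...
 *   // ~_Rope_self_destruct_ptr() calls _Rope_RopeRep::_S_unref(__some_rep) on scope exit.
 */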
/*
* Dereferencing a nonconst iterator has to return something
* that behaves almost like a reference. It's not possible to
* return an actual reference since assignment requires extra
* work. And we would get into the same problems as with the
* CD2 version of basic_string.
*/
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
friend class rope<_CharT,_Alloc>;
friend class _Rope_iterator<_CharT,_Alloc>;
friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
typedef rope<_CharT,_Alloc> _My_rope;
size_t _M_pos;
_CharT _M_current;
bool _M_current_valid;
_My_rope* _M_root; // The whole rope.
public:
_Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
_M_pos(__p), _M_current_valid(false), _M_root(__r) {}
_Rope_char_ref_proxy(const _Self& __x) :
_M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
// Don't preserve the cache if the reference can outlive the
// expression. We claim that's not possible without calling
// a copy constructor or generating a reference to a proxy
// reference. We declare the latter to have undefined semantics.
_Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
: _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
inline operator _CharT () const;
_Self& operator= (_CharT __c);
_Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
_Self& operator= (const _Self& __c) {
return operator=((_CharT)__c);
}
};
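/* Usage sketch (illustrative): dereferencing a mutable _Rope_iterator yields a
 * _Rope_char_ref_proxy.  Reading it goes through operator _CharT(); assigning
 * through it (e.g. *__it = 'x') goes through operator=(_CharT), which updates
 * the rope rather than writing through a real reference.
 */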
#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
_Rope_char_ref_proxy <_CharT, __Alloc > __b) {
_CharT __tmp = __a;
__a = __b;
__b = __tmp;
}
#else
// There is no really acceptable way to handle this. The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes. We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
_Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
_CharT __tmp = __a; \
__a = __b; \
__b = __tmp; \
}
_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
# endif
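/* Illustrative note: code that instantiates rope with its own allocator (say,
 * MyAlloc<char>, a hypothetical name) can generate the matching swap
 * specialization with the same macro:
 *   _ROPE_SWAP_SPECIALIZATION(char, MyAlloc<char>)
 */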
#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
// XXX this class should be rewritten.
public:
typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
size_t _M_pos;
rope<_CharT,_Alloc>* _M_root; // The whole rope.
_Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
: _M_pos(__x._M_pos), _M_root(__x._M_root) {}
_Rope_char_ptr_proxy(const _Self& __x)
: _M_pos(__x._M_pos), _M_root(__x._M_root) {}
_Rope_char_ptr_proxy() {}
_Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
_STLP_ASSERT(0 == __x)
}
_Self& operator= (const _Self& __x) {
_M_pos = __x._M_pos;
_M_root = __x._M_root;
return *this;
}
_Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
}
};
/*
* Rope iterators:
* Unlike in the C version, we cache only part of the stack
* for rope iterators, since they must be efficiently copyable.
* When we run out of cache, we have to reconstruct the iterator
* value.
* Pointers from iterators are not included in reference counts.
* Iterators are assumed to be thread private. Ropes can
* be shared.
*/
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/* : public random_access_iterator<_CharT, ptrdiff_t> */
{
friend class rope<_CharT,_Alloc>;
typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
public:
typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;