// stl_rope.h
_Self& write(value_type* __s, size_t __len)
{
append(__s, __len);
return *this;
}
_Self& put(value_type __x)
{
push_back(__x);
return *this;
}
_Self& operator=(const value_type& __rhs)
{
push_back(__rhs);
return *this;
}
_Self& operator*() { return *this; }
_Self& operator++() { return *this; }
_Self& operator++(int) { return *this; }
};
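// Illustrative sketch (not part of the original header): the member
// functions above implement the classic "proxy output iterator" pattern.
// Since operator*, operator++ and operator++(int) all return *this, an
// expression such as *__it++ = __c collapses into a single push_back(__c).
// A minimal standalone version of the same idea, parameterized over any
// container with push_back (the name below is hypothetical):
template<class _Container>
class _Push_back_proxy {
protected:
    _Container* _M_container;
public:
    explicit _Push_back_proxy(_Container& __c) : _M_container(&__c) {}
    // Assigning through the "dereferenced iterator" appends to the container.
    _Push_back_proxy& operator=(const typename _Container::value_type& __x) {
        _M_container->push_back(__x);
        return *this;
    }
    _Push_back_proxy& operator*()     { return *this; }
    _Push_back_proxy& operator++()    { return *this; }
    _Push_back_proxy& operator++(int) { return *this; }
};
// Usage: given _Push_back_proxy<rope<char> > __out(__r), both
// "__out = 'x';" and "*__out++ = 'x';" reduce to __r.push_back('x').
// The standard back_insert_iterator has the same shape.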
// The following should be treated as private, at least for now.
template<class _CharT>
class _Rope_char_consumer {
public:
// If we had member templates, these should not be virtual.
// For now we need to use run-time parametrization where
// compile-time would do. Hence this should all be private
// for now.
// The symmetry with char_producer is accidental and temporary.
virtual ~_Rope_char_consumer() {};
virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
};
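// Illustrative sketch (not part of the original header): a concrete
// consumer that counts occurrences of a character.  The rope traversal
// code hands each flat piece of the rope to operator() in sequence; a
// false return value is treated as a request to stop early.
template<class _CharT>
class _Count_char_consumer : public _Rope_char_consumer<_CharT> {
public:
    _CharT _M_pattern;
    size_t _M_count;
    _Count_char_consumer(_CharT __p) : _M_pattern(__p), _M_count(0) {}
    virtual bool operator()(const _CharT* __buffer, size_t __len) {
        for (size_t __i = 0; __i < __len; ++__i)
            if (__buffer[__i] == _M_pattern) ++_M_count;
        return true;        // keep walking the remaining pieces
    }
};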
//
// What follows should really be local to rope. Unfortunately,
// that doesn't work, since it makes it impossible to define generic
// equality on rope iterators. According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
//
//
// The internal data structure for representing a rope. This is
// private to the implementation. A rope is really just a pointer
// to one of these.
//
// A few basic functions for manipulating this data structure
// are members of _RopeRep. Most of the more complex algorithms
// are implemented as rope members.
//
// Some of the static member functions of _RopeRep have identically
// named functions in rope that simply invoke the _RopeRep versions.
//
// A macro to introduce various allocation and deallocation functions
// These need to be defined differently depending on whether or not
// we are using standard conforming allocators, and whether the allocator
// instances have real state. Thus this macro is invoked repeatedly
// with different definitions of __ROPE_DEFINE_ALLOC.
#if defined (__STL_MEMBER_TEMPLATE_CLASSES)
# define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy) \
typedef typename \
_Alloc_traits<_Tp,_Alloc>::allocator_type __name##Allocator;
#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy) \
__ROPE_DEFINE_ALLOC(_CharT,_Data, _M_proxy) /* character data */ \
typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
__ROPE_DEFINE_ALLOC(__C,_C, _M_proxy) \
typedef _Rope_RopeLeaf<_CharT,__a> __L; \
__ROPE_DEFINE_ALLOC(__L,_L, _M_proxy) \
typedef _Rope_RopeFunction<_CharT,__a> __F; \
__ROPE_DEFINE_ALLOC(__F,_F, _M_proxy) \
typedef _Rope_RopeSubstring<_CharT,__a> __S; \
__ROPE_DEFINE_ALLOC(__S,_S,_M_proxy)
#else
#define __ROPE_DEFINE_ALLOC(_Tp, __name, _M_proxy)
#define __ROPE_DEFINE_ALLOCS(__a, _M_proxy)
#endif
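// For reference: with __STL_MEMBER_TEMPLATE_CLASSES defined, the
// invocation __ROPE_DEFINE_ALLOCS(_Alloc, _M_size) inside _Rope_RopeRep
// below expands (roughly) to one rebound allocator typedef per node type:
//
//     typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type _DataAllocator;
//     typedef _Rope_RopeConcatenation<_CharT,_Alloc> __C;
//     typedef typename _Alloc_traits<__C,_Alloc>::allocator_type _CAllocator;
//     typedef _Rope_RopeLeaf<_CharT,_Alloc> __L;
//     typedef typename _Alloc_traits<__L,_Alloc>::allocator_type _LAllocator;
//
// and likewise _FAllocator and _SAllocator for the function and substring
// node types.  Without member template classes the macros expand to nothing.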
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
# ifndef __GC
: public _Refcount_Base
# endif
{
typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
# define __ROPE_MAX_DEPTH 45
# define __ROPE_DEPTH_SIZE __ROPE_MAX_DEPTH+1
enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
// Apparently needed by VC++
// The data fields of leaves are allocated with some
// extra space, to accommodate future growth and, for basic
// character types, to hold a trailing eos character.
enum { _S_alloc_granularity = 8 };
_Tag _M_tag:8;
bool _M_is_balanced:8;
typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type
allocator_type;
allocator_type get_allocator() const { return allocator_type(_M_size); }
unsigned char _M_depth;
__GC_CONST _CharT* _M_c_string;
/* Flattened version of string, if needed. */
/* Typically 0. */
/* If it's not 0, then the memory is owned */
/* by this node. */
/* In the case of a leaf, this may point to */
/* the same memory as the data field. */
_STL_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
_Rope_RopeRep(_Tag __t, int __d, bool __b, size_t _p_size,
allocator_type __a) :
# ifndef __GC
_Refcount_Base(1),
# endif
_M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
{ }
# ifdef __GC
void _M_incr () {}
# endif
// fbp : moved from RopeLeaf
static size_t _S_rounded_up_size(size_t __n) {
size_t __size_with_eos;
if (_S_is_basic_char_type((_CharT*)0)) {
__size_with_eos = __n + 1;
} else {
__size_with_eos = __n;
}
# ifdef __GC
return __size_with_eos;
# else
// Allow slop for in-place expansion.
return (__size_with_eos + _S_alloc_granularity-1)
&~ (_S_alloc_granularity-1);
# endif
}
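// Worked example (non-GC build): a leaf holding 10 characters of a basic
// character type gets __size_with_eos = 10 + 1 = 11, which rounds up to
// (11 + 8 - 1) & ~(8 - 1) = 16.  The leaf buffer is therefore allocated
// with room for 16 characters, leaving slop for in-place expansion.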
static void _S_free_string(__GC_CONST _CharT* __s, size_t __len,
allocator_type __a) {
if (!_S_is_basic_char_type((_CharT*)0)) {
destroy(__s, __s + __len);
}
// This has to be a static member, so this gets a bit messy
# ifdef __STL_MEMBER_TEMPLATE_CLASSES
__a.deallocate(__s, _S_rounded_up_size(__len));
# else
__stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
# endif
}
// Deallocate data section of a leaf.
// This shouldn't be a member function.
// But it's hard to do anything else at the
// moment, because it's templatized w.r.t.
// an allocator.
// Does nothing if __GC is defined.
# ifndef __GC
void _M_free_c_string();
void _M_free_tree();
// Drop one reference to this node; free the tree when the count reaches 0.
void _M_unref_nonnil()
{
if (0 == _M_decr()) _M_free_tree();
}
void _M_ref_nonnil()
{
_M_incr();
}
static void _S_unref(_Self* __t)
{
if (0 != __t) {
__t->_M_unref_nonnil();
}
}
static void _S_ref(_Self* __t)
{
if (0 != __t) __t->_M_incr();
}
static void _S_free_if_unref(_Self* __t)
{
if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
}
# else /* __GC */
void _M_unref_nonnil() {}
void _M_ref_nonnil() {}
static void _S_unref(_Self*) {}
static void _S_ref(_Self*) {}
static void _S_free_if_unref(_Self*) {}
# endif
__ROPE_DEFINE_ALLOCS(_Alloc, _M_size)
};
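// Illustrative sketch (not part of the original header): the static
// _S_ref / _S_unref helpers above are the whole ownership protocol a rope
// handle needs to follow.  A hypothetical, heavily simplified handle would
// copy and destroy like this (under __GC both calls are no-ops):
template<class _CharT, class _Alloc>
struct _Toy_rope_handle {
    _Rope_RopeRep<_CharT,_Alloc>* _M_tree_ptr;      // 0 means the empty rope
    explicit _Toy_rope_handle(_Rope_RopeRep<_CharT,_Alloc>* __t = 0)
        : _M_tree_ptr(__t) {}                       // adopts one reference
    _Toy_rope_handle(const _Toy_rope_handle& __x)
        : _M_tree_ptr(__x._M_tree_ptr)
        { _Rope_RopeRep<_CharT,_Alloc>::_S_ref(_M_tree_ptr); }
    ~_Toy_rope_handle()
        { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_tree_ptr); }
    // A real handle (rope itself) must also handle assignment: ref the new
    // tree, then unref the old one.
};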
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
__GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
/* The allocated size is */
/* _S_rounded_up_size(size), except */
/* in the GC case, in which it */
/* doesn't matter. */
typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
_Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t _p_size, allocator_type __a)
: _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, _p_size, __a),
_M_data(__d)
{
__stl_assert(_p_size > 0);
if (_S_is_basic_char_type((_CharT *)0)) {
// already eos terminated.
_M_c_string = __d;
}
}
// The constructor assumes that d has been allocated with
// the proper allocator and the properly padded size.
// In contrast, the destructor deallocates the data:
# ifndef __GC
~_Rope_RopeLeaf() {
if (_M_data != _M_c_string) {
_M_free_c_string();
}
_S_free_string(_M_data, _M_size._M_data, get_allocator());
}
# endif
};
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
public:
_Rope_RopeRep<_CharT,_Alloc>* _M_left;
_Rope_RopeRep<_CharT,_Alloc>* _M_right;
typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
_Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
_Rope_RopeRep<_CharT,_Alloc>* __r,
allocator_type __a)
: _Rope_RopeRep<_CharT,_Alloc>(
_S_concat, max(__l->_M_depth, __r->_M_depth) + 1, false,
__l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
{}
# ifndef __GC
~_Rope_RopeConcatenation() {
_M_free_c_string();
_M_left->_M_unref_nonnil();
_M_right->_M_unref_nonnil();
}
# endif
};
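// Example: concatenating a 5-character leaf (depth 0) with a 7-character
// leaf (depth 0) produces a node with _M_depth = max(0, 0) + 1 = 1 and
// _M_size._M_data = 5 + 7 = 12.  The node starts out marked unbalanced;
// in the non-GC build its destructor drops one reference on each child.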
template<class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
public:
char_producer<_CharT>* _M_fn;
# ifndef __GC
bool _M_delete_when_done; // Char_producer is owned by the
// rope and should be explicitly
// deleted when the rope becomes
// inaccessible.
# else
// In the GC case, we either register the rope for
// finalization, or not. Thus the field is unnecessary;
// the information is stored in the collector data structures.
// We do need a finalization procedure to be invoked by the
// collector.
static void _S_fn_finalization_proc(void * __tree, void *) {
delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
}
# endif
typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
_Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
bool __d, allocator_type __a)
:
_Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, _p_size, __a),
_M_fn(__f)
# ifndef __GC
, _M_delete_when_done(__d)
# endif
{
__stl_assert(_p_size > 0);
# ifdef __GC
if (__d) {
GC_REGISTER_FINALIZER(
this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
}
# endif
}
# ifndef __GC
~_Rope_RopeFunction() {
_M_free_c_string();
if (_M_delete_when_done) {
delete _M_fn;
}
}
# endif
};
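// Illustrative sketch (not part of the original header): a char_producer
// that a function node could wrap -- here one that fills the requested
// range with a fixed character.  The characters are generated only when
// operator() is called, so the "string" is never materialized up front.
template<class _CharT>
class _Fill_char_producer : public char_producer<_CharT> {
public:
    _CharT _M_value;
    _Fill_char_producer(_CharT __v) : _M_value(__v) {}
    virtual void operator()(size_t /* __start_pos */, size_t __len,
                            _CharT* __buffer) {
        for (size_t __i = 0; __i < __len; ++__i)
            __buffer[__i] = _M_value;
    }
};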
// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes,
// or of ropes with a functional representation, that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
public char_producer<_CharT> {
public:
// XXX this whole class should be rewritten.
_Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
size_t _M_start;
virtual void operator()(size_t __start_pos, size_t __req_len,
_CharT* __buffer) {
switch(_M_base->_M_tag) {
case _S_function:
case _S_substringfn:
{
char_producer<_CharT>* __fn =
((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
__stl_assert(__start_pos + __req_len <= _M_size._M_data);
__stl_assert(_M_start + _M_size._M_data <= _M_base->_M_size._M_data);
(*__fn)(__start_pos + _M_start, __req_len, __buffer);
}
break;
case _S_leaf:
{
__GC_CONST _CharT* __s =
((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
__buffer);
}
break;
default:
__stl_assert(false);
}
}
typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
_Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
size_t __l, allocator_type __a)
: _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
_M_base(__b),
_M_start(__s)