// stl_rope.h
while (!_S_is0(*__p)) { ++__p; }
return (__p - __s);
}
public: /* for operators */
rope(_RopeRep* __t, const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, __t) { }
private:
// Copy __r to the _CharT buffer.
// Returns __buffer + __r->_M_size._M_data.
// Assumes that buffer is uninitialized.
static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
// Again, with explicit starting position and length.
// Assumes that buffer is uninitialized.
static _CharT* _S_flatten(_RopeRep* __r,
size_t __start, size_t __len,
_CharT* __buffer);
// fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
public:
static const unsigned long _S_min_len[46];
protected:
static bool _S_is_balanced(_RopeRep* __r)
{ return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
static bool _S_is_almost_balanced(_RopeRep* __r)
{ return (__r->_M_depth == 0 ||
__r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]); }
static bool _S_is_roughly_balanced(_RopeRep* __r)
{ return (__r->_M_depth <= 1 ||
__r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]); }
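// Note: _S_min_len[__d] is the minimum length a rope of depth __d
// must have to count as balanced.  The entries are Fibonacci
// numbers (1, 2, 3, 5, 8, 13, ...), i.e. _S_min_len[__d] = F(__d + 2),
// so a balanced rope of length __n has depth O(log __n).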
// Assumes the result is not empty.
static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
_RopeRep* __right)
{
_RopeRep* __result = _S_concat_rep(__left, __right);
if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
return __result;
}
// The basic rebalancing operation. Logically copies the
// rope. The result has refcount of 1. The client will
// usually decrement the reference count of __r.
// The result is within height 2 of balanced by the above
// definition.
static _RopeRep* _S_balance(_RopeRep* __r);
// Add all unbalanced subtrees to the forest of balanced trees.
// Used only by balance.
static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
// Add __r to forest, assuming __r is already balanced.
static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
// Print to stdout, exposing structure
static void _S_dump(_RopeRep* __r, int __indent = 0);
// Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
public:
bool empty() const { return 0 == _M_tree_ptr._M_data; }
// Comparison member function. This is public only for those
// clients that need a ternary comparison. Others
// should use the comparison operators below.
int compare(const _Self& __y) const {
return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
}
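// Illustrative use (not part of the original header):
//   rope<char> __a("abc"), __b("abd");
//   int __cmp = __a.compare(__b);   // negative, since "abc" < "abd"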
rope(const _CharT* __s, const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),__a))
{ }
rope(const _CharT* __s, size_t __len,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, (__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a)))
{ }
// Should perhaps be templatized with respect to the iterator type
// and use Sequence_buffer. (It should perhaps use sequence_buffer
// even now.)
rope(const _CharT *__s, const _CharT *__e,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a))
{ }
rope(const const_iterator& __s, const const_iterator& __e,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
__e._M_current_pos))
{ }
rope(const iterator& __s, const iterator& __e,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
__e._M_current_pos))
{ }
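// Illustrative sketch (not part of the original header): the
// constructors above accept NUL-terminated strings, counted
// buffers, pointer ranges, and rope iterator ranges:
//   const char* __s = "hello";
//   rope<char> __r1(__s);                      // whole C string
//   rope<char> __r2(__s, size_t(3));           // "hel"
//   rope<char> __r3(__r1.begin(), __r1.end()); // copy via iterators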
rope(_CharT __c, const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, (_RopeRep*)0)
{
_CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
construct(__buf, __c);
__STL_TRY {
_M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
}
__STL_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
}
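// The fill constructor below builds a rope of __n copies of __c
// with O(log __n) concatenations: __n is decomposed as
// __exponent * __exponentiate_threshold + __rest, one
// __exponentiate_threshold-character leaf is built, raised to
// __exponent with power() (binary exponentiation over rope
// concatenation), and the short remainder leaf is appended.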
rope(size_t __n, _CharT __c,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type)):
_M_tree_ptr(__a, (_RopeRep*)0) {
rope<_CharT,_Alloc> __result;
# define __exponentiate_threshold size_t(32)
size_t __exponent;
size_t __rest;
_CharT* __rest_buffer;
_RopeRep* __remainder;
rope<_CharT,_Alloc> __remainder_rope;
// gcc-2.7.2 bugs
typedef _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
if (0 == __n)
return;
__exponent = __n / __exponentiate_threshold;
__rest = __n % __exponentiate_threshold;
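// Invariant: __n == __exponent * __exponentiate_threshold + __rest.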
if (0 == __rest) {
__remainder = 0;
} else {
__rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
uninitialized_fill_n(__rest_buffer, __rest, __c);
_S_cond_store_eos(__rest_buffer[__rest]);
__STL_TRY {
__remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
}
__STL_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
}
__remainder_rope._M_tree_ptr._M_data = __remainder;
if (__exponent != 0) {
_CharT* __base_buffer =
_M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
_RopeLeaf* __base_leaf;
rope<_CharT,_Alloc> __base_rope;
uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
_S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
__STL_TRY {
__base_leaf = _S_new_RopeLeaf(__base_buffer,
__exponentiate_threshold, __a);
}
__STL_UNWIND(_RopeRep::_S_free_string(__base_buffer,
__exponentiate_threshold, __a))
__base_rope._M_tree_ptr._M_data = __base_leaf;
if (1 == __exponent) {
__result = __base_rope;
# ifndef __GC
__stl_assert(2 == __result._M_tree_ptr._M_data->_M_ref_count);
// One each for base_rope and __result
# endif
} else {
__result = power(__base_rope, __exponent, _Concat_fn());
}
if (0 != __remainder) {
__result += __remainder_rope;
}
} else {
__result = __remainder_rope;
}
_M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
_M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
}
rope(const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, (_RopeRep*)0) {}
// Construct a rope from a function that can compute its members
rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
const allocator_type& __a = __STL_ALLOC_INSTANCE(allocator_type))
: _M_tree_ptr(__a, (_RopeRep*)0)
{
_M_tree_ptr._M_data = (0 == __len) ?
0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
}
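// Illustrative sketch (not part of the original header): a
// char_producer computes characters on demand, so a function rope
// need not materialize its contents up front.
//   struct _Sevens : public char_producer<char> {
//     virtual void operator()(size_t, size_t __len, char* __buf) {
//       for (size_t __i = 0; __i < __len; ++__i) __buf[__i] = '7';
//     }
//   };
//   // rope<char> __r(new _Sevens, 1000000, true); // a lazy million '7's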
rope(const _Self& __x)
: _M_tree_ptr(__x.get_allocator(), __x._M_tree_ptr._M_data)
{
_S_ref(_M_tree_ptr._M_data);
}
~rope()
{
_S_unref(_M_tree_ptr._M_data);
}
_Self& operator=(const _Self& __x)
{
_RopeRep* __old = _M_tree_ptr._M_data;
__stl_assert(get_allocator() == __x.get_allocator());
_M_tree_ptr._M_data = __x._M_tree_ptr._M_data;
_S_ref(_M_tree_ptr._M_data);
_S_unref(__old);
return(*this);
}
void clear()
{
_S_unref(_M_tree_ptr._M_data);
_M_tree_ptr._M_data = 0;
}
void push_back(_CharT __x)
{
_RopeRep* __old = _M_tree_ptr._M_data;
_M_tree_ptr._M_data = _S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1);
_S_unref(__old);
}
void pop_back()
{
_RopeRep* __old = _M_tree_ptr._M_data;
_M_tree_ptr._M_data =
_S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
_S_unref(__old);
}
_CharT back() const
{
return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
}
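// push_front builds a one-character leaf and concatenates it on the
// left; if the concatenation throws, the unwind handler releases the
// new leaf and *this is left unchanged.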
void push_front(_CharT __x)
{
_RopeRep* __old = _M_tree_ptr._M_data;
_RopeRep* __left =
__STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
__STL_TRY {
_M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
_S_unref(__old);
_S_unref(__left);
}
__STL_UNWIND(_S_unref(__left))
}
void pop_front()
{
_RopeRep* __old = _M_tree_ptr._M_data;
_M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
_S_unref(__old);
}
_CharT front() const
{
return _S_fetch(_M_tree_ptr._M_data, 0);
}
void balance()
{
_RopeRep* __old = _M_tree_ptr._M_data;
_M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
_S_unref(__old);
}
void copy(_CharT* __buffer) const {
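// __buffer is assumed to contain constructed elements; destroy them
// first, since _S_flatten requires uninitialized storage.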
destroy(__buffer, __buffer + size());
_S_flatten(_M_tree_ptr._M_data, __buffer);
}
// This is the copy function from the standard, but
// with the arguments reordered to make it consistent with the
// rest of the interface.
// Note that this is guaranteed not to compile if the draft standard
// order is assumed.
size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
{
size_t _p_size = size();
size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
destroy(__buffer, __buffer + __len);
_S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
return __len;
}
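// Illustrative sketch (not part of the original header):
//   rope<char> __r("abcdef");
//   char __buf[3];
//   size_t __got = __r.copy(2, 3, __buf);  // copies "cde", __got == 3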
// Print to stdout, exposing structure. May be useful for
// performance debugging.
void dump() {
_S_dump(_M_tree_ptr._M_data);
}
// Convert to 0 terminated string in new allocated memory.
// Embedded 0s in the input do not terminate the copy.
const _CharT* c_str() const;
// As above, but also use the flattened representation as the
// new rope representation.
const _CharT* replace_with_c_str();
// Reclaim memory for the c_str generated flattened string.
// Intentionally undocumented, since it's hard to say when this
// is safe for multiple threads.
void delete_c_str () {
if (0 == _M_tree_ptr._M_data) return;
if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
_M_tree_ptr._M_data->_M_c_string) {
// Representation shared
return;
}
# ifndef __GC
_M_tree_ptr._M_data->_M_free_c_string();
# endif
_M_tree_ptr._M_data->_M_c_string = 0;
}
_CharT operator[] (size_type __pos) const {
return _S_fetch(_M_tree_ptr._M_data, __pos);
}
_CharT at(size_type __pos) const {
// if (__pos >= size()) throw out_of_range; // XXX
return (*this)[__pos];
}
const_iterator begin() const {
return(const_iterator(_M_tree_ptr._M_data, 0));
}
// An easy way to get a const iterator from a non-const container.
const_iterator const_begin() const {
return(const_iterator(_M_tree_ptr._M_data, 0));
}
const_iterator end() const {
return(const_iterator(_M_tree_ptr._M_data, size()));
}
const_iterator const_end() const {
return(const_iterator(_M_tree_ptr._M_data, size()));
}
size_type size() const {
return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
}
size_type length() const { return size(); }