1 // SGI's rope implementation -*- C++ -*-
3 // Copyright (C) 2001 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING. If not, write to the Free
18 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction. Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License. This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
31 * Copyright (c) 1997-1998
32 * Silicon Graphics Computer Systems, Inc.
34 * Permission to use, copy, modify, distribute and sell this software
35 * and its documentation for any purpose is hereby granted without fee,
36 * provided that the above copyright notice appear in all copies and
37 * that both that copyright notice and this permission notice appear
38 * in supporting documentation. Silicon Graphics makes no
39 * representations about the suitability of this software for any
40 * purpose. It is provided "as is" without express or implied warranty.
43 /* NOTE: This is an internal header file, included by other STL headers.
44 * You should not attempt to use it directly.
47 // rope<_CharT,_Alloc> is a sequence of _CharT.
48 // Ropes appear to be mutable, but update operations
49 // really copy enough of the data structure to leave the original
50 // valid. Thus ropes can be logically copied by just copying
53 #ifndef __SGI_STL_INTERNAL_ROPE_H
54 # define __SGI_STL_INTERNAL_ROPE_H
57 # define __GC_CONST const
59 # include <bits/stl_threads.h>
60 # define __GC_CONST // constant except for deallocation
62 # ifdef __STL_SGI_THREADS
69 // The _S_eos function is used for those functions that
70 // convert to/from C-like strings to detect the end of the string.
72 // The end-of-C-string character.
73 // This is what the draft standard says it should be.
// The end-of-C-string character.
// The draft standard specifies this as the value-initialized
// _CharT() (i.e. '\0' for char). The pointer parameter is a dummy
// used only to select the instantiation.
// NOTE(review): the stray listing-number artifacts ("74", "75")
// that made these lines ill-formed have been removed.
template <class _CharT>
inline _CharT _S_eos(_CharT*) { return _CharT(); }
// Test for basic character types.
// For basic character types, leaves carry a trailing eos character.
// The pointer argument is a dummy used only for overload resolution;
// the generic templates answer false, and explicit overloads for the
// basic character types answer true.
// NOTE(review): the stray listing-number artifacts that made these
// lines ill-formed have been removed.
template <class _CharT>
inline bool _S_is_basic_char_type(_CharT*) { return false; }
template <class _CharT>
inline bool _S_is_one_byte_char_type(_CharT*) { return false; }

inline bool _S_is_basic_char_type(char*) { return true; }
inline bool _S_is_one_byte_char_type(char*) { return true; }
inline bool _S_is_basic_char_type(wchar_t*) { return true; }
// Store an eos iff _CharT is a basic character type.
// Do not reference _S_eos if it isn't.
// The generic version is deliberately a no-op; the char/wchar_t
// overloads overwrite the referenced character with 0.
// NOTE(review): the stray listing-number artifacts that made these
// lines ill-formed have been removed.
template <class _CharT>
inline void _S_cond_store_eos(_CharT&) {}

inline void _S_cond_store_eos(char& __c) { __c = 0; }
inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
96 // char_producers are logically functions that generate a section of
97 // a string. These can be converted to ropes. The resulting rope
98 // invokes the char_producer on demand. This allows, for example,
99 // files to be viewed as ropes without reading the entire file.
// Abstract interface: operator() must fill __buffer with __len
// characters of the logical string starting at __start_pos.
// NOTE(review): elided listing — gaps in the embedded numbering mark
// omitted source lines (original line 102, presumably 'public:', and
// the closing '};' are not shown). Code kept byte-identical.
100 template <class _CharT>
101 class char_producer {
103 virtual ~char_producer() {};
104 virtual void operator()(size_t __start_pos, size_t __len,
105 _CharT* __buffer) = 0;
106 // Buffer should really be an arbitrary output iterator.
107 // That way we could flatten directly into an ostream, etc.
108 // This is thoroughly impossible, since iterator types don't
109 // have runtime descriptions.
114 // Sequence must provide an append operation that appends an
115 // array to the sequence. Sequence buffers are useful only if
116 // appending an entire array is cheaper than appending element by element.
117 // This is true for many string representations.
118 // This should perhaps inherit from ostream<sequence::value_type>
119 // and be implemented correspondingly, so that they can be used
120 // for formatted. For the sake of portability, we don't do this yet.
122 // For now, sequence buffers behave as output iterators. But they also
123 // behave a little like basic_ostringstream<sequence::value_type> and a
124 // little like containers.
// Output-iterator-style buffer that batches up to _Buf_sz elements
// before appending them to the underlying _Sequence in one call.
// NOTE(review): elided listing — gaps in the embedded numbering mark
// omitted source lines (access specifiers, flush(), several method
// bodies and closing braces). Code kept byte-identical.
126 template<class _Sequence, size_t _Buf_sz = 100>
127 class sequence_buffer : public output_iterator {
129 typedef typename _Sequence::value_type value_type;
// _M_prefix: the sequence already written; _M_buffer holds the
// not-yet-flushed tail (count presumably in _M_buf_count, whose
// declaration is elided here — TODO confirm against full source).
131 _Sequence* _M_prefix;
132 value_type _M_buffer[_Buf_sz];
136 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
// Destructor flushes any buffered elements into the sequence.
139 ~sequence_buffer() { flush(); }
140 sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
141 sequence_buffer(const sequence_buffer& __x) {
142 _M_prefix = __x._M_prefix;
143 _M_buf_count = __x._M_buf_count;
144 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
146 sequence_buffer(sequence_buffer& __x) {
148 _M_prefix = __x._M_prefix;
151 sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
152 sequence_buffer& operator= (sequence_buffer& __x) {
154 _M_prefix = __x._M_prefix;
158 sequence_buffer& operator= (const sequence_buffer& __x) {
159 _M_prefix = __x._M_prefix;
160 _M_buf_count = __x._M_buf_count;
161 copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
// Buffer one element; body for the full-buffer branch is elided
// (presumably flush-then-store — TODO confirm).
164 void push_back(value_type __x)
166 if (_M_buf_count < _Buf_sz) {
167 _M_buffer[_M_buf_count] = __x;
// Bulk append: copy into the buffer when it fits, bypass the buffer
// entirely when it is empty and the input is large.
175 void append(value_type* __s, size_t __len)
177 if (__len + _M_buf_count <= _Buf_sz) {
178 size_t __i = _M_buf_count;
180 for (; __j < __len; __i++, __j++) {
181 _M_buffer[__i] = __s[__j];
183 _M_buf_count += __len;
184 } else if (0 == _M_buf_count) {
185 _M_prefix->append(__s, __s + __len);
// ostream-flavored conveniences (bodies elided in this listing).
191 sequence_buffer& write(value_type* __s, size_t __len)
196 sequence_buffer& put(value_type __x)
201 sequence_buffer& operator=(const value_type& __rhs)
// Output-iterator protocol: *, ++ and ++(int) are all no-ops that
// return *this, so assignment-through-iterator buffers the value.
206 sequence_buffer& operator*() { return *this; }
207 sequence_buffer& operator++() { return *this; }
208 sequence_buffer& operator++(int) { return *this; }
211 // The following should be treated as private, at least for now.
// Abstract consumer, dual of char_producer: operator() is handed
// successive buffers of the rope; returning false stops traversal.
// NOTE(review): elided listing — access specifiers and the closing
// '};' are omitted. Code kept byte-identical.
212 template<class _CharT>
213 class _Rope_char_consumer {
215 // If we had member templates, these should not be virtual.
216 // For now we need to use run-time parametrization where
217 // compile-time would do. Hence this should all be private
219 // The symmetry with char_producer is accidental and temporary.
220 virtual ~_Rope_char_consumer() {};
221 virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
// Forward declarations of the rope class, its node/iterator/proxy
// helpers, and the free comparison/arithmetic operators on them.
// NOTE(review): elided listing — several declarations are missing
// their return-type lines (e.g. the operator==/</- groups below).
// Code kept byte-identical.
224 // First a lot of forward declarations. The standard seems to require
225 // much stricter "declaration before use" than many of the implementations
227 template<class _CharT, class _Alloc=allocator<_CharT> > class rope;
228 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
229 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
230 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
231 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
232 template<class _CharT, class _Alloc> class _Rope_iterator;
233 template<class _CharT, class _Alloc> class _Rope_const_iterator;
234 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
235 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
237 template<class _CharT, class _Alloc>
238 bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
239 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
// const_iterator arithmetic (second operand lines elided below).
241 template<class _CharT, class _Alloc>
242 _Rope_const_iterator<_CharT,_Alloc> operator-
243 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
246 template<class _CharT, class _Alloc>
247 _Rope_const_iterator<_CharT,_Alloc> operator+
248 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
251 template<class _CharT, class _Alloc>
252 _Rope_const_iterator<_CharT,_Alloc> operator+
254 const _Rope_const_iterator<_CharT,_Alloc>& __x);
// const_iterator comparisons (return-type lines elided).
256 template<class _CharT, class _Alloc>
258 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
259 const _Rope_const_iterator<_CharT,_Alloc>& __y);
261 template<class _CharT, class _Alloc>
263 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
264 const _Rope_const_iterator<_CharT,_Alloc>& __y);
266 template<class _CharT, class _Alloc>
268 (const _Rope_const_iterator<_CharT,_Alloc>& __x,
269 const _Rope_const_iterator<_CharT,_Alloc>& __y);
// Mutable iterator arithmetic and comparisons, mirroring the above.
271 template<class _CharT, class _Alloc>
272 _Rope_iterator<_CharT,_Alloc> operator-
273 (const _Rope_iterator<_CharT,_Alloc>& __x,
276 template<class _CharT, class _Alloc>
277 _Rope_iterator<_CharT,_Alloc> operator+
278 (const _Rope_iterator<_CharT,_Alloc>& __x,
281 template<class _CharT, class _Alloc>
282 _Rope_iterator<_CharT,_Alloc> operator+
284 const _Rope_iterator<_CharT,_Alloc>& __x);
286 template<class _CharT, class _Alloc>
288 (const _Rope_iterator<_CharT,_Alloc>& __x,
289 const _Rope_iterator<_CharT,_Alloc>& __y);
291 template<class _CharT, class _Alloc>
293 (const _Rope_iterator<_CharT,_Alloc>& __x,
294 const _Rope_iterator<_CharT,_Alloc>& __y);
296 template<class _CharT, class _Alloc>
298 (const _Rope_iterator<_CharT,_Alloc>& __x,
299 const _Rope_iterator<_CharT,_Alloc>& __y);
// Rope concatenation operators.
301 template<class _CharT, class _Alloc>
302 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
303 const rope<_CharT,_Alloc>& __right);
305 template<class _CharT, class _Alloc>
306 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
307 const _CharT* __right);
309 template<class _CharT, class _Alloc>
310 rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
// Binary functor wrapping rope concatenation so that power() can be
// applied to ropes; identity_element returns the empty rope.
// NOTE(review): elided listing — the operator() return statement,
// closing braces, and the identity_element return-type line are
// omitted. Code kept byte-identical.
313 // Some helpers, so we can use power on ropes.
314 // See below for why this isn't local to the implementation.
316 // This uses a nonstandard refcount convention.
317 // The result has refcount 0.
318 template<class _CharT, class _Alloc>
319 struct _Rope_Concat_fn
320 : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
321 rope<_CharT,_Alloc> > {
322 rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
323 const rope<_CharT,_Alloc>& __y) {
328 template <class _CharT, class _Alloc>
331 identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
333 return rope<_CharT,_Alloc>();
338 // What follows should really be local to rope. Unfortunately,
339 // that doesn't work, since it makes it impossible to define generic
340 // equality on rope iterators. According to the draft standard, the
341 // template parameters for such an equality operator cannot be inferred
342 // from the occurrence of a member class as a parameter.
343 // (SGI compilers in fact allow this, but the __result wouldn't be
345 // Similarly, some of the static member functions are member functions
346 // only to avoid polluting the global namespace, and to circumvent
347 // restrictions on type inference for template functions.
351 // The internal data structure for representing a rope. This is
352 // private to the implementation. A rope is really just a pointer
355 // A few basic functions for manipulating this data structure
356 // are members of _RopeRep. Most of the more complex algorithms
357 // are implemented as rope members.
359 // Some of the static member functions of _RopeRep have identically
360 // named functions in rope that simply invoke the _RopeRep versions.
362 // A macro to introduce various allocation and deallocation functions
363 // These need to be defined differently depending on whether or not
364 // we are using standard conforming allocators, and whether the allocator
365 // instances have real state. Thus this macro is invoked repeatedly
366 // with different definitions of __ROPE_DEFINE_ALLOC.
367 // __ROPE_DEFINE_ALLOC(type,name) defines
368 // type * name_allocate(size_t) and
369 // void name_deallocate(type *, size_t)
370 // Both functions may or may not be static.
// Expands __ROPE_DEFINE_ALLOC once per node type (character data,
// concatenation, leaf, function, substring), generating the
// *_allocate/*_deallocate pairs described in the comment above.
// No comments may be inserted inside the continuation lines below.
372 #define __ROPE_DEFINE_ALLOCS(__a) \
373 __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
374 typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
375 __ROPE_DEFINE_ALLOC(__C,_C) \
376 typedef _Rope_RopeLeaf<_CharT,__a> __L; \
377 __ROPE_DEFINE_ALLOC(__L,_L) \
378 typedef _Rope_RopeFunction<_CharT,__a> __F; \
379 __ROPE_DEFINE_ALLOC(__F,_F) \
380 typedef _Rope_RopeSubstring<_CharT,__a> __S; \
381 __ROPE_DEFINE_ALLOC(__S,_S)
383 // Internal rope nodes potentially store a copy of the allocator
384 // instance used to allocate them. This is mostly redundant.
385 // But the alternative would be to pass allocator instances around
386 // in some form to nearly all internal functions, since any pointer
387 // assignment may result in a zero reference count and thus require
389 // The _Rope_rep_base class encapsulates
390 // the differences between SGI-style allocators and standard-conforming
393 #define __STATIC_IF_SGI_ALLOC /* not static */
// Primary template: stores an allocator instance (_M_data_allocator)
// alongside the node size; the macro expansion generates the
// per-node-type allocate/deallocate members against that instance.
// NOTE(review): elided listing — access specifiers, the allocator_type
// typedef's second line, and the closing '};' are omitted. Code kept
// byte-identical.
395 // Base class for ordinary allocators.
396 template <class _CharT, class _Allocator, bool _IsStatic>
397 class _Rope_rep_alloc_base {
399 typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
401 allocator_type get_allocator() const { return _M_data_allocator; }
402 _Rope_rep_alloc_base(size_t __size, const allocator_type& __a)
403 : _M_size(__size), _M_data_allocator(__a) {}
404 size_t _M_size; // This is here only to avoid wasting space
405 // for an otherwise empty base class.
409 allocator_type _M_data_allocator;
411 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
413 _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
414 /*static*/ _Tp * __name##_allocate(size_t __n) \
415 { return __name##Allocator(_M_data_allocator).allocate(__n); } \
416 void __name##_deallocate(_Tp* __p, size_t __n) \
417 { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
418 __ROPE_DEFINE_ALLOCS(_Allocator);
419 # undef __ROPE_DEFINE_ALLOC
// Specialization for instanceless allocators: no allocator object is
// stored, get_allocator() returns a fresh default-constructed one, and
// the generated *_allocate members can be static.
// NOTE(review): elided listing — access specifiers, typedef
// continuation lines, and the closing '};' are omitted. Code kept
// byte-identical.
422 // Specialization for allocators that have the property that we don't
423 // actually have to store an allocator object.
424 template <class _CharT, class _Allocator>
425 class _Rope_rep_alloc_base<_CharT,_Allocator,true> {
427 typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
429 allocator_type get_allocator() const { return allocator_type(); }
430 _Rope_rep_alloc_base(size_t __size, const allocator_type&)
436 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
438 _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
440 _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
441 static _Tp* __name##_allocate(size_t __n) \
442 { return __name##Alloc::allocate(__n); } \
443 void __name##_deallocate(_Tp *__p, size_t __n) \
444 { __name##Alloc::deallocate(__p, __n); }
445 __ROPE_DEFINE_ALLOCS(_Allocator);
446 # undef __ROPE_DEFINE_ALLOC
// Dispatch base: selects the stored-vs-instanceless allocator base via
// _Alloc_traits<...>::_S_instanceless and forwards size + allocator.
// NOTE(review): elided listing — the opening '{', the '_Base' typedef
// name line, and the closing '};' are omitted. Code kept
// byte-identical.
449 template <class _CharT, class _Alloc>
450 struct _Rope_rep_base
451 : public _Rope_rep_alloc_base<_CharT,_Alloc,
452 _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
454 typedef _Rope_rep_alloc_base<_CharT,_Alloc,
455 _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
457 typedef typename _Base::allocator_type allocator_type;
458 _Rope_rep_base(size_t __size, const allocator_type& __a)
459 : _Base(__size, __a) {}
// The tree-node base for the rope data structure: every node carries a
// tag (leaf / concat / substring-function / function), a depth bounded
// by _S_max_rope_depth, a balance flag, and an optional cached
// flattened C string.
// NOTE(review): elided listing — the gaps include the opening '{',
// the _M_tag declaration, #if/#else directives (the duplicate
// _M_unref_nonnil etc. below are presumably the __GC branch — TODO
// confirm), ref-count members, and the closing '};'. Code kept
// byte-identical.
463 template<class _CharT, class _Alloc>
464 struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc>
470 enum { _S_max_rope_depth = 45 };
471 enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
473 bool _M_is_balanced:8;
474 unsigned char _M_depth;
475 __GC_CONST _CharT* _M_c_string;
476 /* Flattened version of string, if needed. */
478 /* If it's not 0, then the memory is owned */
480 /* In the case of a leaf, this may point to */
481 /* the same memory as the data field. */
482 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
484 _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
486 : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
490 _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0)
495 static void _S_free_string(__GC_CONST _CharT*, size_t __len,
497 # define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
498 // Deallocate data section of a leaf.
499 // This shouldn't be a member function.
500 // But its hard to do anything else at the
501 // moment, because it's templatized w.r.t.
503 // Does nothing if __GC is defined.
505 void _M_free_c_string();
507 // Deallocate t. Assumes t is not 0.
508 void _M_unref_nonnil()
510 if (0 == _M_decr()) _M_free_tree();
516 static void _S_unref(_Rope_RopeRep* __t)
519 __t->_M_unref_nonnil();
522 static void _S_ref(_Rope_RopeRep* __t)
524 if (0 != __t) __t->_M_incr();
526 static void _S_free_if_unref(_Rope_RopeRep* __t)
528 if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
// The following no-op variants presumably belong to the garbage-
// collected (__GC) configuration, whose #ifdef lines are elided.
531 void _M_unref_nonnil() {}
532 void _M_ref_nonnil() {}
533 static void _S_unref(_Rope_RopeRep*) {}
534 static void _S_ref(_Rope_RopeRep*) {}
535 static void _S_free_if_unref(_Rope_RopeRep*) {}
// Leaf node: owns a flat character array. Allocation sizes are rounded
// up to _S_alloc_granularity (with +1 for the eos character when
// _CharT is a basic character type) to leave slop for in-place growth.
// NOTE(review): elided listing — #if/#else lines, member declarations
// (_M_size is inherited), destructor header, and closing braces are
// omitted. Code kept byte-identical.
540 template<class _CharT, class _Alloc>
541 struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
543 // Apparently needed by VC++
544 // The data fields of leaves are allocated with some
545 // extra space, to accommodate future growth and for basic
546 // character types, to hold a trailing eos character.
547 enum { _S_alloc_granularity = 8 };
548 static size_t _S_rounded_up_size(size_t __n) {
549 size_t __size_with_eos;
551 if (_S_is_basic_char_type((_CharT*)0)) {
552 __size_with_eos = __n + 1;
554 __size_with_eos = __n;
// The bare return below is presumably the __GC branch (no rounding);
// the #ifdef lines are elided from this listing.
557 return __size_with_eos;
559 // Allow slop for in-place expansion.
560 return (__size_with_eos + _S_alloc_granularity-1)
561 &~ (_S_alloc_granularity-1);
564 __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
565 /* The allocated size is */
566 /* _S_rounded_up_size(size), except */
567 /* in the GC case, in which it */
568 /* doesn't matter. */
569 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
571 _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
572 : _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a),
575 __stl_assert(__size > 0);
576 if (_S_is_basic_char_type((_CharT *)0)) {
577 // already eos terminated.
581 // The constructor assumes that d has been allocated with
582 // the proper allocator and the properly padded size.
583 // In contrast, the destructor deallocates the data:
// Destructor body: free _M_data unless the cached c-string aliases it.
586 if (_M_data != _M_c_string) {
589 __STL_FREE_STRING(_M_data, _M_size, get_allocator());
// Internal concatenation node: holds owning pointers to the left and
// right subtrees; size is the sum, depth is max(child depths) + 1.
// NOTE(review): elided listing — access specifiers, a constructor
// argument line, and closing braces are omitted. Code kept
// byte-identical.
594 template<class _CharT, class _Alloc>
595 struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
597 _Rope_RopeRep<_CharT,_Alloc>* _M_left;
598 _Rope_RopeRep<_CharT,_Alloc>* _M_right;
599 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
601 _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
602 _Rope_RopeRep<_CharT,_Alloc>* __r,
605 : _Rope_RopeRep<_CharT,_Alloc>(_S_concat,
606 max(__l->_M_depth, __r->_M_depth) + 1,
608 __l->_M_size + __r->_M_size, __a),
609 _M_left(__l), _M_right(__r)
// Destructor drops one reference on each child subtree.
612 ~_Rope_RopeConcatenation() {
614 _M_left->_M_unref_nonnil();
615 _M_right->_M_unref_nonnil();
// Function node: characters are produced lazily by *_M_fn; when
// _M_delete_when_done is set, the destructor owns and deletes the
// producer. In the __GC build, deletion happens via the registered
// finalization procedure instead.
// NOTE(review): elided listing — #ifdef __GC lines, constructor-body
// braces, and the destructor's delete statement are omitted. Code
// kept byte-identical.
620 template<class _CharT, class _Alloc>
621 struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
623 char_producer<_CharT>* _M_fn;
625 bool _M_delete_when_done; // Char_producer is owned by the
626 // rope and should be explicitly
627 // deleted when the rope becomes
630 // In the GC case, we either register the rope for
631 // finalization, or not. Thus the field is unnecessary;
632 // the information is stored in the collector data structures.
633 // We do need a finalization procedure to be invoked by the
635 static void _S_fn_finalization_proc(void * __tree, void *) {
636 delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
639 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
641 _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
642 bool __d, allocator_type __a)
643 : _Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a)
646 , _M_delete_when_done(__d)
649 __stl_assert(__size > 0);
652 GC_REGISTER_FINALIZER(
653 this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
658 ~_Rope_RopeFunction() {
660 if (_M_delete_when_done) {
666 // Substring results are usually represented using just
667 // concatenation nodes. But in the case of very long flat ropes
668 // or ropes with a functional representation that isn't practical.
669 // In that case, we represent the __result as a special case of
670 // RopeFunction, whose char_producer points back to the rope itself.
671 // In all cases except repeated substring operations and
672 // deallocation, we treat the __result as a RopeFunction.
// Substring node: a RopeFunction whose char_producer is the node
// itself, delegating reads to the base rope at offset _M_start.
// operator() handles _S_function bases by forwarding to the base's
// producer and leaf bases by copying directly from the leaf data.
// NOTE(review): elided listing — case labels, _M_start declaration,
// default branch, and closing braces are omitted. Code kept
// byte-identical.
673 template<class _CharT, class _Alloc>
674 struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
675 public char_producer<_CharT> {
677 // XXX this whole class should be rewritten.
678 _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0
680 virtual void operator()(size_t __start_pos, size_t __req_len,
682 switch(_M_base->_M_tag) {
686 char_producer<_CharT>* __fn =
687 ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
688 __stl_assert(__start_pos + __req_len <= _M_size);
689 __stl_assert(_M_start + _M_size <= _M_base->_M_size);
690 (*__fn)(__start_pos + _M_start, __req_len, __buffer);
695 __GC_CONST _CharT* __s =
696 ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
697 uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
705 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
707 _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
708 size_t __l, allocator_type __a)
709 : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
710 char_producer<_CharT>(),
714 __stl_assert(__l > 0);
715 __stl_assert(__s + __l <= __b->_M_size);
// Constructor takes a reference on the base; the tag is rewritten
// from _S_function to _S_substringfn after base construction.
717 _M_base->_M_ref_nonnil();
719 _M_tag = _S_substringfn;
721 virtual ~_Rope_RopeSubstring()
724 _M_base->_M_unref_nonnil();
725 // _M_free_c_string(); -- done by parent class
731 // Self-destructing pointers to Rope_rep.
732 // These are not conventional smart pointers. Their
733 // only purpose in life is to ensure that unref is called
734 // on the pointer either at normal exit or if an exception
735 // is raised. It is the caller's responsibility to
736 // adjust reference counts when these pointers are initialized
737 // or assigned to. (This convention significantly reduces
738 // the number of potentially expensive reference count
// Exception-safety helper: unrefs the held node on destruction.
// Deliberately NOT a conventional smart pointer — assignment and
// construction do not adjust reference counts (see comment above).
// NOTE(review): elided listing — the #else/#endif around the two
// default constructors and the closing '};' are omitted. Code kept
// byte-identical.
741 template<class _CharT, class _Alloc>
742 struct _Rope_self_destruct_ptr {
743 _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
744 ~_Rope_self_destruct_ptr()
745 { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
746 # ifdef __STL_USE_EXCEPTIONS
747 _Rope_self_destruct_ptr() : _M_ptr(0) {};
749 _Rope_self_destruct_ptr() {};
751 _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
752 _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
753 _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
754 operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
755 _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
756 { _M_ptr = __x; return *this; }
760 // Dereferencing a nonconst iterator has to return something
761 // that behaves almost like a reference. It's not possible to
762 // return an actual reference since assignment requires extra
763 // work. And we would get into the same problems as with the
764 // CD2 version of basic_string.
// Proxy returned when dereferencing a non-const rope iterator: records
// (rope, position) and an optionally-cached character value, so that
// reads convert to _CharT and writes go through operator=(_CharT).
// NOTE(review): elided listing — the #ifdef __GC selecting between the
// two _Self_destruct_ptr typedefs, _M_pos/_M_current declarations,
// access specifiers, and the closing '};' are omitted. Code kept
// byte-identical.
765 template<class _CharT, class _Alloc>
766 class _Rope_char_ref_proxy {
767 friend class rope<_CharT,_Alloc>;
768 friend class _Rope_iterator<_CharT,_Alloc>;
769 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
771 typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
773 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
775 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
776 typedef rope<_CharT,_Alloc> _My_rope;
779 bool _M_current_valid;
780 _My_rope* _M_root; // The whole rope.
782 _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
783 : _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
// Copy constructor intentionally does NOT copy the cached value.
784 _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
785 : _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
786 // Don't preserve cache if the reference can outlive the
787 // expression. We claim that's not possible without calling
788 // a copy constructor or generating reference to a proxy
789 // reference. We declare the latter to have undefined semantics.
790 _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
791 : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
792 inline operator _CharT () const;
793 _Rope_char_ref_proxy& operator= (_CharT __c);
794 _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
795 _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
796 return operator=((_CharT)__c);
// swap overload for character reference proxies (takes them by value,
// as proxies). NOTE(review): the function body and closing brace are
// elided from this listing. Code kept byte-identical.
800 template<class _CharT, class __Alloc>
801 inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
802 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
// Proxy standing in for a _CharT* into a rope: stores (rope, position)
// and dereferences to a _Rope_char_ref_proxy. A real pointer may only
// be converted to this proxy when it is null (see the assert).
// NOTE(review): elided listing — access specifiers, _M_pos
// declaration, return statements, and closing braces are omitted.
// Code kept byte-identical.
808 template<class _CharT, class _Alloc>
809 class _Rope_char_ptr_proxy {
810 // XXX this class should be rewritten.
811 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
813 rope<_CharT,_Alloc>* _M_root; // The whole rope.
815 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
816 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
817 _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
818 : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
819 _Rope_char_ptr_proxy() {}
820 _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
821 __stl_assert(0 == __x);
823 _Rope_char_ptr_proxy&
824 operator= (const _Rope_char_ptr_proxy& __x) {
826 _M_root = __x._M_root;
829 template<class _CharT2, class _Alloc2>
830 friend bool operator== (const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __x,
831 const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __y);
832 _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
833 return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
839 // Unlike in the C version, we cache only part of the stack
840 // for rope iterators, since they must be efficiently copyable.
841 // When we run out of cache, we have to reconstruct the iterator
843 // Pointers from iterators are not included in reference counts.
844 // Iterators are assumed to be thread private. Ropes can
// Common state for rope iterators: the current position, a pointer
// into a buffer of nearby characters (_M_buf_ptr != 0 means the cache
// is valid), and a compact path cache (_M_path_end +
// _M_path_directions) recording the bottom of the root-to-leaf path.
// NOTE(review): elided listing — access specifiers, some comment
// continuation lines, and the tail of the copy constructor / class
// are omitted. Code kept byte-identical.
847 template<class _CharT, class _Alloc>
848 class _Rope_iterator_base
849 : public random_access_iterator<_CharT, ptrdiff_t> {
850 friend class rope<_CharT,_Alloc>;
852 typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
853 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
854 // Borland doesn't want this to be protected.
856 enum { _S_path_cache_len = 4 }; // Must be <= 9.
857 enum { _S_iterator_buf_len = 15 };
858 size_t _M_current_pos;
859 _RopeRep* _M_root; // The whole rope.
860 size_t _M_leaf_pos; // Starting position for current leaf
861 __GC_CONST _CharT* _M_buf_start;
863 // containing current char.
864 __GC_CONST _CharT* _M_buf_ptr;
865 // Pointer to current char in buffer.
866 // != 0 ==> buffer valid.
867 __GC_CONST _CharT* _M_buf_end;
868 // One past __last valid char in buffer.
869 // What follows is the path cache. We go out of our
870 // way to make this compact.
871 // Path_end contains the bottom section of the path from
872 // the root to the current leaf.
873 const _RopeRep* _M_path_end[_S_path_cache_len];
874 int _M_leaf_index; // Last valid __pos in path_end;
875 // _M_path_end[0] ... _M_path_end[leaf_index-1]
876 // point to concatenation nodes.
877 unsigned char _M_path_directions;
878 // (path_directions >> __i) & 1 is 1
879 // iff we got from _M_path_end[leaf_index - __i - 1]
880 // to _M_path_end[leaf_index - __i] by going to the
881 // __right. Assumes path_cache_len <= 9.
882 _CharT _M_tmp_buf[_S_iterator_buf_len];
883 // Short buffer for surrounding chars.
884 // This is useful primarily for
885 // RopeFunctions. We put the buffer
886 // here to avoid locking in the
887 // multithreaded case.
888 // The cached path is generally assumed to be valid
889 // only if the buffer is valid.
890 static void _S_setbuf(_Rope_iterator_base& __x);
891 // Set buffer contents given
893 static void _S_setcache(_Rope_iterator_base& __x);
894 // Set buffer contents and
896 static void _S_setcache_for_incr(_Rope_iterator_base& __x);
897 // As above, but assumes path
898 // cache is valid for previous posn.
899 _Rope_iterator_base() {}
900 _Rope_iterator_base(_RopeRep* __root, size_t __pos)
901 : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
902 void _M_incr(size_t __n);
903 void _M_decr(size_t __n);
905 size_t index() const { return _M_current_pos; }
// Copy constructor: copies the whole base (including cache) when the
// source's buffer is valid; the invalid-cache branch is elided here.
906 _Rope_iterator_base(const _Rope_iterator_base& __x) {
907 if (0 != __x._M_buf_ptr) {
910 _M_current_pos = __x._M_current_pos;
911 _M_root = __x._M_root;
// Read-only rope iterator. reference is _CharT by value (returning a
// real reference would drag the character into reference counting).
// Const iterators do not bump the root's reference count.
// NOTE(review): elided listing — access specifiers, several method
// bodies/returns, operator declarations' closing lines, and the
// class's closing '};' are omitted. Code kept byte-identical.
917 template<class _CharT, class _Alloc> class _Rope_iterator;
919 template<class _CharT, class _Alloc>
920 class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
921 friend class rope<_CharT,_Alloc>;
923 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
924 // The one from the base class may not be directly visible.
925 _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
926 _Rope_iterator_base<_CharT,_Alloc>(
927 const_cast<_RopeRep*>(__root), __pos)
928 // Only nonconst iterators modify root ref count
931 typedef _CharT reference; // Really a value. Returning a reference
932 // Would be a mess, since it would have
933 // to be included in refcount.
934 typedef const _CharT* pointer;
937 _Rope_const_iterator() {};
938 _Rope_const_iterator(const _Rope_const_iterator& __x) :
939 _Rope_iterator_base<_CharT,_Alloc>(__x) { }
940 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
941 _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
942 _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
// Assignment copies the whole base only when the source cache is
// valid; the else-branch copies position and root individually.
943 _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
944 if (0 != __x._M_buf_ptr) {
945 *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
947 _M_current_pos = __x._M_current_pos;
948 _M_root = __x._M_root;
953 reference operator*() {
954 if (0 == _M_buf_ptr) _S_setcache(*this);
// Fast path for ++: advance within the valid buffer when possible.
957 _Rope_const_iterator& operator++() {
958 __GC_CONST _CharT* __next;
959 if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
967 _Rope_const_iterator& operator+=(ptrdiff_t __n) {
975 _Rope_const_iterator& operator--() {
979 _Rope_const_iterator& operator-=(ptrdiff_t __n) {
987 _Rope_const_iterator operator++(int) {
988 size_t __old_pos = _M_current_pos;
990 return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
991 // This makes a subsequent dereference expensive.
992 // Perhaps we should instead copy the iterator
993 // if it has a valid cache?
995 _Rope_const_iterator operator--(int) {
996 size_t __old_pos = _M_current_pos;
998 return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
1000 template<class _CharT2, class _Alloc2>
1001 friend _Rope_const_iterator<_CharT2,_Alloc2> operator-
1002 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1004 template<class _CharT2, class _Alloc2>
1005 friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
1006 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1008 template<class _CharT2, class _Alloc2>
1009 friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
1011 const _Rope_const_iterator<_CharT2,_Alloc2>& __x);
// Random access bypasses the cache via rope's static _S_fetch.
1012 reference operator[](size_t __n) {
1013 return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
1016 template<class _CharT2, class _Alloc2>
1017 friend bool operator==
1018 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1019 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
1020 template<class _CharT2, class _Alloc2>
1021 friend bool operator<
1022 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1023 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
1024 template<class _CharT2, class _Alloc2>
1025 friend ptrdiff_t operator-
1026 (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
1027 const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
// Mutable rope iterator.  Unlike _Rope_const_iterator it keeps a pointer
// to the owning rope (_M_root_rope) and holds a reference count on the
// cached root so that modification of the rope can be detected.
// Dereferencing yields a _Rope_char_ref_proxy rather than a plain
// reference, since individual characters are not independently mutable.
// NOTE(review): listing has line-number gaps; closing braces of several
// members and of the class itself fall in the gaps.
1030 template<class _CharT, class _Alloc>
1031 class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1032 friend class rope<_CharT,_Alloc>;
1034 rope<_CharT,_Alloc>* _M_root_rope;
1035 // root is treated as a cached version of this,
1036 // and is used to detect changes to the underlying
1038 // Root is included in the reference count.
1039 // This is necessary so that we can detect changes reliably.
1040 // Unfortunately, it requires careful bookkeeping for the
// Private ctor used by rope itself: takes a reference on the root and
// eagerly builds the leaf cache for non-empty ropes.
1042 _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
1043 : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
1045 { _RopeRep::_S_ref(_M_root); if (!(__r -> empty()))_S_setcache(*this); }
1049 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1050 typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1053 rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1055 _M_root = 0; // Needed for reference counting.
// Copy ctor must add a reference to the shared root.
1057 _Rope_iterator(const _Rope_iterator& __x) :
1058 _Rope_iterator_base<_CharT,_Alloc>(__x) {
1059 _M_root_rope = __x._M_root_rope;
1060 _RopeRep::_S_ref(_M_root);
1062 _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1064 _RopeRep::_S_unref(_M_root);
// Assignment: ref the new root before unref'ing the old one, so
// self-assignment cannot destroy the tree prematurely.
1066 _Rope_iterator& operator= (const _Rope_iterator& __x) {
1067 _RopeRep* __old = _M_root;
1069 _RopeRep::_S_ref(__x._M_root);
1070 if (0 != __x._M_buf_ptr) {
1071 _M_root_rope = __x._M_root_rope;
1072 *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
1074 _M_current_pos = __x._M_current_pos;
1075 _M_root = __x._M_root;
1076 _M_root_rope = __x._M_root_rope;
1079 _RopeRep::_S_unref(__old);
// Dereference: returns a proxy; when the cache is valid the proxy also
// carries the current character value so reads avoid a tree walk.
1082 reference operator*() {
1084 if (0 == _M_buf_ptr) {
1085 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1086 _M_root_rope, _M_current_pos);
1088 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1089 _M_root_rope, _M_current_pos, *_M_buf_ptr);
1092 _Rope_iterator& operator++() {
1096 _Rope_iterator& operator+=(ptrdiff_t __n) {
1104 _Rope_iterator& operator--() {
1108 _Rope_iterator& operator-=(ptrdiff_t __n) {
// Post-increment/decrement return an uncached iterator at the old
// position (same trade-off as in _Rope_const_iterator).
1116 _Rope_iterator operator++(int) {
1117 size_t __old_pos = _M_current_pos;
1119 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1121 _Rope_iterator operator--(int) {
1122 size_t __old_pos = _M_current_pos;
1124 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1126 reference operator[](ptrdiff_t __n) {
1127 return _Rope_char_ref_proxy<_CharT,_Alloc>(
1128 _M_root_rope, _M_current_pos + __n);
// Friend declarations for comparison and arithmetic operators.
1131 template<class _CharT2, class _Alloc2>
1132 friend bool operator==
1133 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1134 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1135 template<class _CharT2, class _Alloc2>
1136 friend bool operator<
1137 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1138 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1139 template<class _CharT2, class _Alloc2>
1140 friend ptrdiff_t operator-
1141 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1142 const _Rope_iterator<_CharT2,_Alloc2>& __y);
1143 template<class _CharT2, class _Alloc2>
1144 friend _Rope_iterator<_CharT2,_Alloc2> operator-
1145 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1147 template<class _CharT2, class _Alloc2>
1148 friend _Rope_iterator<_CharT2,_Alloc2> operator+
1149 (const _Rope_iterator<_CharT2,_Alloc2>& __x,
1151 template<class _CharT2, class _Alloc2>
1152 friend _Rope_iterator<_CharT2,_Alloc2> operator+
1154 const _Rope_iterator<_CharT2,_Alloc2>& __x);
1157 // The rope base class encapsulates
1158 // the differences between SGI-style allocators and standard-conforming
1161 // Base class for ordinary allocators.
// Primary template: base class for ropes whose allocator carries state
// and therefore must be stored as a data member (_M_data_allocator).
1162 template <class _CharT, class _Allocator, bool _IsStatic>
1163 class _Rope_alloc_base {
1165 typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
1166 typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
1168 allocator_type get_allocator() const { return _M_data_allocator; }
1169 _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
1170 : _M_tree_ptr(__t), _M_data_allocator(__a) {}
1171 _Rope_alloc_base(const allocator_type& __a)
1172 : _M_data_allocator(__a) {}
1175 // The only data members of a rope:
1176 allocator_type _M_data_allocator;
1177 _RopeRep* _M_tree_ptr;
// For each node type, generate a rebound allocator typedef plus
// allocate/deallocate helpers that rebind the stored allocator.
// (No comments may be placed between the backslash-continued lines.)
1179 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1181 _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
1182 _Tp* __name##_allocate(size_t __n) const \
1183 { return __name##Allocator(_M_data_allocator).allocate(__n); } \
1184 void __name##_deallocate(_Tp *__p, size_t __n) const \
1185 { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
1186 __ROPE_DEFINE_ALLOCS(_Allocator)
1187 # undef __ROPE_DEFINE_ALLOC
1190 // Specialization for allocators that have the property that we don't
1191 // actually have to store an allocator object.
// Specialization for instanceless (stateless) allocators: no allocator
// member is stored; get_allocator() returns a freshly constructed one,
// and the per-node-type helpers are static.
1192 template <class _CharT, class _Allocator>
1193 class _Rope_alloc_base<_CharT,_Allocator,true> {
1195 typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
1196 typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
1198 allocator_type get_allocator() const { return allocator_type(); }
1199 _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
1200 : _M_tree_ptr(__t) {}
1201 _Rope_alloc_base(const allocator_type&) {}
1204 // The only data member of a rope:
1205 _RopeRep *_M_tree_ptr;
// Static allocate/deallocate helpers per node type; uses the SGI-style
// _Alloc_type since no allocator instance exists.
1207 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1209 _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
1211 _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
1212 static _Tp* __name##_allocate(size_t __n) \
1213 { return __name##Alloc::allocate(__n); } \
1214 static void __name##_deallocate(_Tp *__p, size_t __n) \
1215 { __name##Alloc::deallocate(__p, __n); }
1216 __ROPE_DEFINE_ALLOCS(_Allocator)
1217 # undef __ROPE_DEFINE_ALLOC
// Dispatches to the stored-allocator or instanceless base according to
// _Alloc_traits<_CharT,_Alloc>::_S_instanceless.
1220 template <class _CharT, class _Alloc>
1222 : public _Rope_alloc_base<_CharT,_Alloc,
1223 _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
1225 typedef _Rope_alloc_base<_CharT,_Alloc,
1226 _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
1228 typedef typename _Base::allocator_type allocator_type;
1229 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1230 // The one in _Base may not be visible due to template rules.
1231 _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
1232 _Rope_base(const allocator_type& __a) : _Base(__a) {}
// The rope class itself: a heavyweight string represented as a
// reference-counted tree of leaf/concatenation/function/substring nodes.
// This span covers the standard typedefs, friendships, and the static
// tree-level helpers for fetching characters and (un)referencing nodes.
1236 template <class _CharT, class _Alloc>
1237 class rope : public _Rope_base<_CharT,_Alloc> {
1239 typedef _CharT value_type;
1240 typedef ptrdiff_t difference_type;
1241 typedef size_t size_type;
// const_reference is a plain value: characters are immutable in place;
// mutation goes through the proxy 'reference' below.
1242 typedef _CharT const_reference;
1243 typedef const _CharT* const_pointer;
1244 typedef _Rope_iterator<_CharT,_Alloc> iterator;
1245 typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1246 typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1247 typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1249 friend class _Rope_iterator<_CharT,_Alloc>;
1250 friend class _Rope_const_iterator<_CharT,_Alloc>;
1251 friend struct _Rope_RopeRep<_CharT,_Alloc>;
1252 friend class _Rope_iterator_base<_CharT,_Alloc>;
1253 friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1254 friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1255 friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1258 typedef _Rope_base<_CharT,_Alloc> _Base;
1259 typedef typename _Base::allocator_type allocator_type;
1260 using _Base::_M_tree_ptr;
1261 typedef __GC_CONST _CharT* _Cstrptr;
1263 static _CharT _S_empty_c_str[1];
1265 static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1266 enum { _S_copy_max = 23 };
1267 // For strings shorter than _S_copy_max, we copy to
// Internal node-type abbreviations.
1270 typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1271 typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1272 typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1273 typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1274 typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1276 // Retrieve a character at the indicated position.
1277 static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1280 // Obtain a pointer to the character at the indicated position.
1281 // The pointer can be used to change the character.
1282 // If such a pointer cannot be produced, as is frequently the
1283 // case, 0 is returned instead.
1284 // (Returns nonzero only if all nodes in the path have a refcount
1286 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1289 static bool _S_apply_to_pieces(
1290 // should be template parameter
1291 _Rope_char_consumer<_CharT>& __c,
1292 const _RopeRep* __r,
1293 size_t __begin, size_t __end);
1294 // begin and end are assumed to be in range.
// Reference-count wrappers; the empty-bodied overloads are presumably
// the garbage-collected (__GC) configuration -- TODO confirm against
// the preprocessor conditionals omitted from this excerpt.
1297 static void _S_unref(_RopeRep* __t)
1299 _RopeRep::_S_unref(__t);
1301 static void _S_ref(_RopeRep* __t)
1303 _RopeRep::_S_ref(__t);
1306 static void _S_unref(_RopeRep*) {}
1307 static void _S_ref(_RopeRep*) {}
// Two alternative definitions of _Self_destruct_ptr (plain pointer vs
// RAII unref-on-destruction), selected by configuration.
1312 typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1314 typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1317 // _Result is counted in refcount.
1318 static _RopeRep* _S_substring(_RopeRep* __base,
1319 size_t __start, size_t __endp1);
1321 static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1322 const _CharT* __iter, size_t __slen);
1323 // Concatenate rope and char ptr, copying __s.
1324 // Should really take an arbitrary iterator.
1325 // Result is counted in refcount.
1326 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1327 const _CharT* __iter, size_t __slen)
1328 // As above, but one reference to __r is about to be
1329 // destroyed. Thus the pieces may be recycled if all
1330 // relevant reference counts are 1.
1332 // We can't really do anything since refcounts are unavailable.
1333 { return _S_concat_char_iter(__r, __iter, __slen); }
1338 static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
1339 // General concatenation on _RopeRep. _Result
1340 // has refcount of 1. Adjusts argument refcounts.
// Apply a char consumer to the characters in [__begin, __end).
1343 void apply_to_pieces( size_t __begin, size_t __end,
1344 _Rope_char_consumer<_CharT>& __c) const {
1345 _S_apply_to_pieces(__c, _M_tree_ptr, __begin, __end);
// Static helpers: leaf-buffer sizing, placement-construction of the
// four node types via rebound allocators, and the tree algorithms
// (concatenation, flattening, balancing, dump, compare).
1351 static size_t _S_rounded_up_size(size_t __n) {
1352 return _RopeLeaf::_S_rounded_up_size(__n);
// Usable capacity of a leaf buffer: basic char types reserve one slot
// for the terminating eos character.
1355 static size_t _S_allocated_capacity(size_t __n) {
1356 if (_S_is_basic_char_type((_CharT*)0)) {
1357 return _S_rounded_up_size(__n) - 1;
1359 return _S_rounded_up_size(__n);
1363 // Allocate and construct a RopeLeaf using the supplied allocator
1364 // Takes ownership of s instead of copying.
1365 static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1366 size_t __size, allocator_type __a)
1368 _RopeLeaf* __space = _LAllocator(__a).allocate(1);
1369 return new(__space) _RopeLeaf(__s, __size, __a);
// Placement-construct a concatenation node.
1372 static _RopeConcatenation* _S_new_RopeConcatenation(
1373 _RopeRep* __left, _RopeRep* __right,
1376 _RopeConcatenation* __space = _CAllocator(__a).allocate(1);
1377 return new(__space) _RopeConcatenation(__left, __right, __a);
// Placement-construct a function (lazy character-producer) node.
1380 static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1381 size_t __size, bool __d, allocator_type __a)
1383 _RopeFunction* __space = _FAllocator(__a).allocate(1);
1384 return new(__space) _RopeFunction(__f, __size, __d, __a);
// Placement-construct a lazy substring node over __b.
1387 static _RopeSubstring* _S_new_RopeSubstring(
1388 _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1389 size_t __l, allocator_type __a)
1391 _RopeSubstring* __space = _SAllocator(__a).allocate(1);
1392 return new(__space) _RopeSubstring(__b, __s, __l, __a);
// Build a leaf by copying an unowned character buffer; empty input
// yields a null tree.  The macro alias exists so configurations can
// swap in a different implementation.
1396 _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1397 size_t __size, allocator_type __a)
1398 # define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
1399 _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
1401 if (0 == __size) return 0;
1402 _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
1404 uninitialized_copy_n(__s, __size, __buf);
// Conditionally store the eos terminator (only for basic char types).
1405 _S_cond_store_eos(__buf[__size]);
1407 return _S_new_RopeLeaf(__buf, __size, __a);
1409 __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, __size, __a))
1413 // Concatenation of nonempty strings.
1414 // Always builds a concatenation node.
1415 // Rebalances if the result is too deep.
1416 // Result has refcount 1.
1417 // Does not increment left and right ref counts even though
1418 // they are referenced.
1420 _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1422 // Concatenation helper functions
1424 _S_leaf_concat_char_iter(_RopeLeaf* __r,
1425 const _CharT* __iter, size_t __slen);
1426 // Concatenate by copying leaf.
1427 // should take an arbitrary iterator
1428 // result has refcount 1.
1430 static _RopeLeaf* _S_destr_leaf_concat_char_iter
1431 (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1432 // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1437 static size_t _S_char_ptr_len(const _CharT* __s);
1438 // slightly generalized strlen
// Private ctor adopting an existing tree (refcount not incremented).
1440 rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1441 : _Base(__t,__a) { }
1444 // Copy __r to the _CharT buffer.
1445 // Returns __buffer + __r->_M_size.
1446 // Assumes that buffer is uninitialized.
1447 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1449 // Again, with explicit starting position and length.
1450 // Assumes that buffer is uninitialized.
1451 static _CharT* _S_flatten(_RopeRep* __r,
1452 size_t __start, size_t __len,
// Fibonacci-style minimum sizes per depth, used by the balance tests.
1455 static const unsigned long
1456 _S_min_len[_RopeRep::_S_max_rope_depth + 1];
1458 static bool _S_is_balanced(_RopeRep* __r)
1459 { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
1461 static bool _S_is_almost_balanced(_RopeRep* __r)
1462 { return (__r->_M_depth == 0 ||
1463 __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
1465 static bool _S_is_roughly_balanced(_RopeRep* __r)
1466 { return (__r->_M_depth <= 1 ||
1467 __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
1469 // Assumes the result is not empty.
1470 static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1473 _RopeRep* __result = _S_concat(__left, __right);
1474 if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1478 // The basic rebalancing operation. Logically copies the
1479 // rope. The result has refcount of 1. The client will
1480 // usually decrement the reference count of __r.
1481 // The result is within height 2 of balanced by the above
1483 static _RopeRep* _S_balance(_RopeRep* __r);
1485 // Add all unbalanced subtrees to the forest of balanced trees.
1486 // Used only by balance.
1487 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1489 // Add __r to forest, assuming __r is already balanced.
1490 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1492 // Print to stdout, exposing structure
1493 static void _S_dump(_RopeRep* __r, int __indent = 0);
1495 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1496 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
// Public interface: emptiness, ternary compare, the constructor family,
// destructor/assignment, push/pop at both ends, flattening copies, and
// read-only element access.  An empty rope is represented by a null
// _M_tree_ptr throughout.
1499 bool empty() const { return 0 == _M_tree_ptr; }
1501 // Comparison member function. This is public only for those
1502 // clients that need a ternary comparison. Others
1503 // should use the comparison operators below.
1504 int compare(const rope& __y) const {
1505 return _S_compare(_M_tree_ptr, __y._M_tree_ptr);
// Construct from a NUL-terminated string (length via _S_char_ptr_len).
1508 rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1509 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
// Construct from an explicit (pointer, length) pair.
1513 rope(const _CharT* __s, size_t __len,
1514 const allocator_type& __a = allocator_type())
1515 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
1518 // Should perhaps be templatized with respect to the iterator type
1519 // and use Sequence_buffer. (It should perhaps use sequence_buffer
// Construct from a [first, last) pointer range.
1521 rope(const _CharT *__s, const _CharT *__e,
1522 const allocator_type& __a = allocator_type())
1523 : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
// Construct from a const_iterator range: shares structure via a
// substring node rather than copying characters.
1526 rope(const const_iterator& __s, const const_iterator& __e,
1527 const allocator_type& __a = allocator_type())
1528 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1529 __e._M_current_pos), __a)
1532 rope(const iterator& __s, const iterator& __e,
1533 const allocator_type& __a = allocator_type())
1534 : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1535 __e._M_current_pos), __a)
// Single-character rope: allocates a one-character leaf.
1538 rope(_CharT __c, const allocator_type& __a = allocator_type())
1541 _CharT* __buf = _Data_allocate(_S_rounded_up_size(1));
1543 construct(__buf, __c);
1545 _M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
1547 __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, 1, __a))
1550 rope(size_t __n, _CharT __c,
1551 const allocator_type& __a = allocator_type());
// Default ctor: empty rope (null tree).
1553 rope(const allocator_type& __a = allocator_type())
1556 // Construct a rope from a function that can compute its members
1557 rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1558 const allocator_type& __a = allocator_type())
1561 _M_tree_ptr = (0 == __len) ?
1562 0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
// Copy ctor shares the tree and bumps its reference count.
1565 rope(const rope& __x, const allocator_type& __a = allocator_type())
1566 : _Base(__x._M_tree_ptr, __a)
1568 _S_ref(_M_tree_ptr);
1573 _S_unref(_M_tree_ptr);
// Assignment: ref new tree first, unref old last (self-assignment safe).
1576 rope& operator=(const rope& __x)
1578 _RopeRep* __old = _M_tree_ptr;
1579 __stl_assert(get_allocator() == __x.get_allocator());
1580 _M_tree_ptr = __x._M_tree_ptr;
1581 _S_ref(_M_tree_ptr);
1588 _S_unref(_M_tree_ptr);
// Append one character, recycling nodes where possible.
1592 void push_back(_CharT __x)
1594 _RopeRep* __old = _M_tree_ptr;
1595 _M_tree_ptr = _S_destr_concat_char_iter(_M_tree_ptr, &__x, 1);
// pop_back: rebuild as the substring [0, size-1).
1601 _RopeRep* __old = _M_tree_ptr;
1603 _S_substring(_M_tree_ptr, 0, _M_tree_ptr->_M_size - 1);
1609 return _S_fetch(_M_tree_ptr, _M_tree_ptr->_M_size - 1);
// Prepend one character: build a one-char leaf, concat in front.
1612 void push_front(_CharT __x)
1614 _RopeRep* __old = _M_tree_ptr;
1616 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1618 _M_tree_ptr = _S_concat(__left, _M_tree_ptr);
1622 __STL_UNWIND(_S_unref(__left))
// pop_front: rebuild as the substring [1, size).
1627 _RopeRep* __old = _M_tree_ptr;
1628 _M_tree_ptr = _S_substring(_M_tree_ptr, 1, _M_tree_ptr->_M_size);
1632 _CharT front() const
1634 return _S_fetch(_M_tree_ptr, 0);
// Rebalance in place; the old tree is released afterwards.
1639 _RopeRep* __old = _M_tree_ptr;
1640 _M_tree_ptr = _S_balance(_M_tree_ptr);
// Flatten the whole rope into caller-provided uninitialized storage.
1644 void copy(_CharT* __buffer) const {
1645 destroy(__buffer, __buffer + size());
1646 _S_flatten(_M_tree_ptr, __buffer);
1649 // This is the copy function from the standard, but
1650 // with the arguments reordered to make it consistent with the
1651 // rest of the interface.
1652 // Note that this is guaranteed not to compile if the draft standard
1653 // order is assumed.
1654 size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1656 size_t __size = size();
1657 size_t __len = (__pos + __n > __size? __size - __pos : __n);
1659 destroy(__buffer, __buffer + __len);
1660 _S_flatten(_M_tree_ptr, __pos, __len, __buffer);
1664 // Print to stdout, exposing structure. May be useful for
1665 // performance debugging.
1667 _S_dump(_M_tree_ptr);
1670 // Convert to 0 terminated string in new allocated memory.
1671 // Embedded 0s in the input do not terminate the copy.
1672 const _CharT* c_str() const;
1674 // As above, but also use the flattened representation as
1675 // the new rope representation.
1676 const _CharT* replace_with_c_str();
1678 // Reclaim memory for the c_str generated flattened string.
1679 // Intentionally undocumented, since it's hard to say when this
1680 // is safe for multiple threads.
1681 void delete_c_str () {
1682 if (0 == _M_tree_ptr) return;
// If the c-string is the leaf's own data buffer, it must not be freed.
1683 if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag &&
1684 ((_RopeLeaf*)_M_tree_ptr)->_M_data ==
1685 _M_tree_ptr->_M_c_string) {
1686 // Representation shared
1690 _M_tree_ptr->_M_free_c_string();
1692 _M_tree_ptr->_M_c_string = 0;
// Read-only element access: returns by value (see const_reference).
1695 _CharT operator[] (size_type __pos) const {
1696 return _S_fetch(_M_tree_ptr, __pos);
1699 _CharT at(size_type __pos) const {
1700 // if (__pos >= size()) throw out_of_range; // XXX
1701 return (*this)[__pos];
// Const iteration, size queries, reverse iteration, and the friend
// operator+ overloads for whole-rope concatenation.
1704 const_iterator begin() const {
1705 return(const_iterator(_M_tree_ptr, 0));
1708 // An easy way to get a const iterator from a non-const container.
1709 const_iterator const_begin() const {
1710 return(const_iterator(_M_tree_ptr, 0));
1713 const_iterator end() const {
1714 return(const_iterator(_M_tree_ptr, size()));
1717 const_iterator const_end() const {
1718 return(const_iterator(_M_tree_ptr, size()));
// size(): a null tree means the empty rope.
1721 size_type size() const {
1722 return(0 == _M_tree_ptr? 0 : _M_tree_ptr->_M_size);
1725 size_type length() const {
1729 size_type max_size() const {
1730 return _S_min_len[_RopeRep::_S_max_rope_depth-1] - 1;
1731 // Guarantees that the result can be sufficiently
1732 // balanced. Longer ropes will probably still work,
1733 // but it's harder to make guarantees.
1736 typedef reverse_iterator<const_iterator> const_reverse_iterator;
1738 const_reverse_iterator rbegin() const {
1739 return const_reverse_iterator(end());
1742 const_reverse_iterator const_rbegin() const {
1743 return const_reverse_iterator(end());
1746 const_reverse_iterator rend() const {
1747 return const_reverse_iterator(begin());
1750 const_reverse_iterator const_rend() const {
1751 return const_reverse_iterator(begin());
// Whole-rope concatenation operators (defined at namespace scope).
1754 template<class _CharT2, class _Alloc2>
1755 friend rope<_CharT2,_Alloc2>
1756 operator+ (const rope<_CharT2,_Alloc2>& __left,
1757 const rope<_CharT2,_Alloc2>& __right);
1759 template<class _CharT2, class _Alloc2>
1760 friend rope<_CharT2,_Alloc2>
1761 operator+ (const rope<_CharT2,_Alloc2>& __left,
1762 const _CharT2* __right);
1764 template<class _CharT2, class _Alloc2>
1765 friend rope<_CharT2,_Alloc2>
1766 operator+ (const rope<_CharT2,_Alloc2>& __left, _CharT2 __right);
1770 // The following should really be templatized.
1771 // The first argument should be an input iterator or
1772 // forward iterator with value_type _CharT.
1773 rope& append(const _CharT* __iter, size_t __n) {
1774 _RopeRep* __result =
1775 _S_destr_concat_char_iter(_M_tree_ptr, __iter, __n);
1776 _S_unref(_M_tree_ptr);
1777 _M_tree_ptr = __result;
1781 rope& append(const _CharT* __c_string) {
1782 size_t __len = _S_char_ptr_len(__c_string);
1783 append(__c_string, __len);
1787 rope& append(const _CharT* __s, const _CharT* __e) {
1788 _RopeRep* __result =
1789 _S_destr_concat_char_iter(_M_tree_ptr, __s, __e - __s);
1790 _S_unref(_M_tree_ptr);
1791 _M_tree_ptr = __result;
1795 rope& append(const_iterator __s, const_iterator __e) {
1796 __stl_assert(__s._M_root == __e._M_root);
1797 __stl_assert(get_allocator() == __s._M_root->get_allocator());
1798 _Self_destruct_ptr __appendee(_S_substring(
1799 __s._M_root, __s._M_current_pos, __e._M_current_pos));
1800 _RopeRep* __result =
1801 _S_concat(_M_tree_ptr, (_RopeRep*)__appendee);
1802 _S_unref(_M_tree_ptr);
1803 _M_tree_ptr = __result;
1807 rope& append(_CharT __c) {
1808 _RopeRep* __result =
1809 _S_destr_concat_char_iter(_M_tree_ptr, &__c, 1);
1810 _S_unref(_M_tree_ptr);
1811 _M_tree_ptr = __result;
1815 rope& append() { return append(_CharT()); } // XXX why?
1817 rope& append(const rope& __y) {
1818 __stl_assert(__y.get_allocator() == get_allocator());
1819 _RopeRep* __result = _S_concat(_M_tree_ptr, __y._M_tree_ptr);
1820 _S_unref(_M_tree_ptr);
1821 _M_tree_ptr = __result;
1825 rope& append(size_t __n, _CharT __c) {
1826 rope<_CharT,_Alloc> __last(__n, __c);
1827 return append(__last);
1830 void swap(rope& __b) {
1831 __stl_assert(get_allocator() == __b.get_allocator());
1832 _RopeRep* __tmp = _M_tree_ptr;
1833 _M_tree_ptr = __b._M_tree_ptr;
1834 __b._M_tree_ptr = __tmp;
1839 // Result is included in refcount.
1840 static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1841 size_t __pos2, _RopeRep* __r) {
1842 if (0 == __old) { _S_ref(__r); return __r; }
1843 _Self_destruct_ptr __left(
1844 _S_substring(__old, 0, __pos1));
1845 _Self_destruct_ptr __right(
1846 _S_substring(__old, __pos2, __old->_M_size));
1849 __stl_assert(__old->get_allocator() == __r->get_allocator());
1851 __result = _S_concat(__left, __right);
1853 _Self_destruct_ptr __left_result(_S_concat(__left, __r));
1854 __result = _S_concat(__left_result, __right);
1860 void insert(size_t __p, const rope& __r) {
1861 _RopeRep* __result =
1862 replace(_M_tree_ptr, __p, __p, __r._M_tree_ptr);
1863 __stl_assert(get_allocator() == __r.get_allocator());
1864 _S_unref(_M_tree_ptr);
1865 _M_tree_ptr = __result;
1868 void insert(size_t __p, size_t __n, _CharT __c) {
1869 rope<_CharT,_Alloc> __r(__n,__c);
1873 void insert(size_t __p, const _CharT* __i, size_t __n) {
1874 _Self_destruct_ptr __left(_S_substring(_M_tree_ptr, 0, __p));
1875 _Self_destruct_ptr __right(_S_substring(_M_tree_ptr, __p, size()));
1876 _Self_destruct_ptr __left_result(
1877 _S_concat_char_iter(__left, __i, __n));
1878 // _S_ destr_concat_char_iter should be safe here.
1879 // But as it stands it's probably not a win, since __left
1880 // is likely to have additional references.
1881 _RopeRep* __result = _S_concat(__left_result, __right);
1882 _S_unref(_M_tree_ptr);
1883 _M_tree_ptr = __result;
1886 void insert(size_t __p, const _CharT* __c_string) {
1887 insert(__p, __c_string, _S_char_ptr_len(__c_string));
1890 void insert(size_t __p, _CharT __c) {
1891 insert(__p, &__c, 1);
1894 void insert(size_t __p) {
1895 _CharT __c = _CharT();
1896 insert(__p, &__c, 1);
1899 void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1904 void insert(size_t __p, const const_iterator& __i,
1905 const const_iterator& __j) {
1910 void insert(size_t __p, const iterator& __i,
1911 const iterator& __j) {
// (position, length) replace overloads — all but the first delegate by
// first packaging the replacement as a temporary rope — plus the
// (position, length) erase.
1916 // (position, length) versions of replace operations:
1918 void replace(size_t __p, size_t __n, const rope& __r) {
1919 _RopeRep* __result =
1920 replace(_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
1921 _S_unref(_M_tree_ptr);
1922 _M_tree_ptr = __result;
1925 void replace(size_t __p, size_t __n,
1926 const _CharT* __i, size_t __i_len) {
1927 rope __r(__i, __i_len);
1928 replace(__p, __n, __r);
1931 void replace(size_t __p, size_t __n, _CharT __c) {
1933 replace(__p, __n, __r);
1936 void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1937 rope __r(__c_string);
1938 replace(__p, __n, __r);
1941 void replace(size_t __p, size_t __n,
1942 const _CharT* __i, const _CharT* __j) {
1944 replace(__p, __n, __r);
1947 void replace(size_t __p, size_t __n,
1948 const const_iterator& __i, const const_iterator& __j) {
1950 replace(__p, __n, __r);
1953 void replace(size_t __p, size_t __n,
1954 const iterator& __i, const iterator& __j) {
1956 replace(__p, __n, __r);
1959 // Single character variants:
// Single-character replace via a mutable iterator and its proxy.
1960 void replace(size_t __p, _CharT __c) {
1961 iterator __i(this, __p);
1965 void replace(size_t __p, const rope& __r) {
1966 replace(__p, 1, __r);
1969 void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1970 replace(__p, 1, __i, __i_len);
1973 void replace(size_t __p, const _CharT* __c_string) {
1974 replace(__p, 1, __c_string);
1977 void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1978 replace(__p, 1, __i, __j);
1981 void replace(size_t __p, const const_iterator& __i,
1982 const const_iterator& __j) {
1983 replace(__p, 1, __i, __j);
1986 void replace(size_t __p, const iterator& __i,
1987 const iterator& __j) {
1988 replace(__p, 1, __i, __j);
1991 // Erase, (position, size) variant.
// Erase __n characters starting at __p: a replace with a null tree.
1992 void erase(size_t __p, size_t __n) {
1993 _RopeRep* __result = replace(_M_tree_ptr, __p, __p + __n, 0);
1994 _S_unref(_M_tree_ptr);
1995 _M_tree_ptr = __result;
1998 // Erase, single character
// Erase exactly one character at position __p.
// BUG FIX: the (position, size) overload above takes a *length* __n
// (it calls replace(_M_tree_ptr, __p, __p + __n, 0)), so the single-
// character erase must pass length 1.  The previous call
// erase(__p, __p + 1) removed __p + 1 characters starting at __p.
1999 void erase(size_t __p) {
2000 erase(__p, 1);
// Iterator-based insert/replace/erase variants — thin adapters that
// convert iterators to indices via index() and forward to the
// position-based overloads — plus the structure-sharing substr family.
2003 // Insert, iterator variants.
2004 iterator insert(const iterator& __p, const rope& __r)
2005 { insert(__p.index(), __r); return __p; }
2006 iterator insert(const iterator& __p, size_t __n, _CharT __c)
2007 { insert(__p.index(), __n, __c); return __p; }
2008 iterator insert(const iterator& __p, _CharT __c)
2009 { insert(__p.index(), __c); return __p; }
2010 iterator insert(const iterator& __p )
2011 { insert(__p.index()); return __p; }
2012 iterator insert(const iterator& __p, const _CharT* c_string)
2013 { insert(__p.index(), c_string); return __p; }
2014 iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
2015 { insert(__p.index(), __i, __n); return __p; }
2016 iterator insert(const iterator& __p, const _CharT* __i,
2018 { insert(__p.index(), __i, __j); return __p; }
2019 iterator insert(const iterator& __p,
2020 const const_iterator& __i, const const_iterator& __j)
2021 { insert(__p.index(), __i, __j); return __p; }
2022 iterator insert(const iterator& __p,
2023 const iterator& __i, const iterator& __j)
2024 { insert(__p.index(), __i, __j); return __p; }
2026 // Replace, range variants.
2027 void replace(const iterator& __p, const iterator& __q,
2029 { replace(__p.index(), __q.index() - __p.index(), __r); }
2030 void replace(const iterator& __p, const iterator& __q, _CharT __c)
2031 { replace(__p.index(), __q.index() - __p.index(), __c); }
2032 void replace(const iterator& __p, const iterator& __q,
2033 const _CharT* __c_string)
2034 { replace(__p.index(), __q.index() - __p.index(), __c_string); }
2035 void replace(const iterator& __p, const iterator& __q,
2036 const _CharT* __i, size_t __n)
2037 { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
2038 void replace(const iterator& __p, const iterator& __q,
2039 const _CharT* __i, const _CharT* __j)
2040 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2041 void replace(const iterator& __p, const iterator& __q,
2042 const const_iterator& __i, const const_iterator& __j)
2043 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2044 void replace(const iterator& __p, const iterator& __q,
2045 const iterator& __i, const iterator& __j)
2046 { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2048 // Replace, iterator variants.
2049 void replace(const iterator& __p, const rope& __r)
2050 { replace(__p.index(), __r); }
2051 void replace(const iterator& __p, _CharT __c)
2052 { replace(__p.index(), __c); }
2053 void replace(const iterator& __p, const _CharT* __c_string)
2054 { replace(__p.index(), __c_string); }
2055 void replace(const iterator& __p, const _CharT* __i, size_t __n)
2056 { replace(__p.index(), __i, __n); }
2057 void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
2058 { replace(__p.index(), __i, __j); }
2059 void replace(const iterator& __p, const_iterator __i,
2061 { replace(__p.index(), __i, __j); }
2062 void replace(const iterator& __p, iterator __i, iterator __j)
2063 { replace(__p.index(), __i, __j); }
2065 // Iterator and range variants of erase
2066 iterator erase(const iterator& __p, const iterator& __q) {
// Cache the index: __p is invalidated by the erase below.
2067 size_t __p_index = __p.index();
2068 erase(__p_index, __q.index() - __p_index);
2069 return iterator(this, __p_index);
2071 iterator erase(const iterator& __p) {
2072 size_t __p_index = __p.index();
2073 erase(__p_index, 1);
2074 return iterator(this, __p_index);
// substr: builds a new rope sharing structure via _S_substring.
// NOTE(review): default __len is 1, not npos as in basic_string.
2077 rope substr(size_t __start, size_t __len = 1) const {
2078 return rope<_CharT,_Alloc>(
2079 _S_substring(_M_tree_ptr, __start, __start + __len));
2082 rope substr(iterator __start, iterator __end) const {
2083 return rope<_CharT,_Alloc>(
2084 _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2087 rope substr(iterator __start) const {
2088 size_t __pos = __start.index();
2089 return rope<_CharT,_Alloc>(
2090 _S_substring(_M_tree_ptr, __pos, __pos + 1));
2093 rope substr(const_iterator __start, const_iterator __end) const {
2094 // This might eventually take advantage of the cache in the
2096 return rope<_CharT,_Alloc>(
2097 _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2100 rope<_CharT,_Alloc> substr(const_iterator __start) {
2101 size_t __pos = __start.index();
2102 return rope<_CharT,_Alloc>(
2103 _S_substring(_M_tree_ptr, __pos, __pos + 1));
// Searching, mutable iteration (kept distinct from const iteration
// because mutable iterators are expensive — they reference-count the
// root), proxy-based mutable element access, and basic_string-style
// stubs (resize/reserve/capacity are no-ops for a rope).
2106 static const size_type npos;
2108 size_type find(_CharT __c, size_type __pos = 0) const;
// Substring search via std::search over const iterators; an index equal
// to size() is mapped to npos (unless old rope semantics are enabled).
2109 size_type find(const _CharT* __s, size_type __pos = 0) const {
2110 size_type __result_pos;
2111 const_iterator __result = search(const_begin() + __pos, const_end(),
2112 __s, __s + _S_char_ptr_len(__s));
2113 __result_pos = __result.index();
2114 # ifndef __STL_OLD_ROPE_SEMANTICS
2115 if (__result_pos == size()) __result_pos = npos;
2117 return __result_pos;
2120 iterator mutable_begin() {
2121 return(iterator(this, 0));
2124 iterator mutable_end() {
2125 return(iterator(this, size()));
2128 typedef reverse_iterator<iterator> reverse_iterator;
2130 reverse_iterator mutable_rbegin() {
2131 return reverse_iterator(mutable_end());
2134 reverse_iterator mutable_rend() {
2135 return reverse_iterator(mutable_begin());
2138 reference mutable_reference_at(size_type __pos) {
2139 return reference(this, __pos);
// Non-const operator[] returns a char-reference proxy, so assignment
// through it rewrites the rope.
2143 reference operator[] (size_type __pos) {
2144 return _char_ref_proxy(this, __pos);
2147 reference at(size_type __pos) {
2148 // if (__pos >= size()) throw out_of_range; // XXX
2149 return (*this)[__pos];
// Deliberate no-ops: a rope has no excess-capacity notion.
2152 void resize(size_type __n, _CharT __c) {}
2153 void resize(size_type __n) {}
2154 void reserve(size_type __res_arg = 0) {}
2155 size_type capacity() const {
2159 // Stuff below this line is dangerous because it's error prone.
2160 // I would really like to get rid of it.
2161 // copy function with funny arg ordering.
2162 size_type copy(_CharT* __buffer, size_type __n,
2163 size_type __pos = 0) const {
2164 return copy(__pos, __n, __buffer);
// When mutable iteration is the default, begin()/end() yield mutable
// iterators; otherwise (alternate configuration) they yield const ones.
2167 iterator end() { return mutable_end(); }
2169 iterator begin() { return mutable_begin(); }
2171 reverse_iterator rend() { return mutable_rend(); }
2173 reverse_iterator rbegin() { return mutable_rbegin(); }
2177 const_iterator end() { return const_end(); }
2179 const_iterator begin() { return const_begin(); }
2181 const_reverse_iterator rend() { return const_rend(); }
2183 const_reverse_iterator rbegin() { return const_rbegin(); }
2189 template <class _CharT, class _Alloc>
2190 const rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
2193 template <class _CharT, class _Alloc>
2194 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2195 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2196 return (__x._M_current_pos == __y._M_current_pos &&
2197 __x._M_root == __y._M_root);
2200 template <class _CharT, class _Alloc>
2201 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2202 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2203 return (__x._M_current_pos < __y._M_current_pos);
2206 template <class _CharT, class _Alloc>
2207 inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2208 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2209 return !(__x == __y);
2212 template <class _CharT, class _Alloc>
2213 inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2214 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2218 template <class _CharT, class _Alloc>
2219 inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2220 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2221 return !(__y < __x);
2224 template <class _CharT, class _Alloc>
2225 inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2226 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2227 return !(__x < __y);
2230 template <class _CharT, class _Alloc>
2231 inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2232 const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2233 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2236 template <class _CharT, class _Alloc>
2237 inline _Rope_const_iterator<_CharT,_Alloc>
2238 operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2239 return _Rope_const_iterator<_CharT,_Alloc>(
2240 __x._M_root, __x._M_current_pos - __n);
2243 template <class _CharT, class _Alloc>
2244 inline _Rope_const_iterator<_CharT,_Alloc>
2245 operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2246 return _Rope_const_iterator<_CharT,_Alloc>(
2247 __x._M_root, __x._M_current_pos + __n);
2250 template <class _CharT, class _Alloc>
2251 inline _Rope_const_iterator<_CharT,_Alloc>
2252 operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
2253 return _Rope_const_iterator<_CharT,_Alloc>(
2254 __x._M_root, __x._M_current_pos + __n);
2257 template <class _CharT, class _Alloc>
2258 inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2259 const _Rope_iterator<_CharT,_Alloc>& __y) {
2260 return (__x._M_current_pos == __y._M_current_pos &&
2261 __x._M_root_rope == __y._M_root_rope);
2264 template <class _CharT, class _Alloc>
2265 inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2266 const _Rope_iterator<_CharT,_Alloc>& __y) {
2267 return (__x._M_current_pos < __y._M_current_pos);
2270 template <class _CharT, class _Alloc>
2271 inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
2272 const _Rope_iterator<_CharT,_Alloc>& __y) {
2273 return !(__x == __y);
2276 template <class _CharT, class _Alloc>
2277 inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
2278 const _Rope_iterator<_CharT,_Alloc>& __y) {
2282 template <class _CharT, class _Alloc>
2283 inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
2284 const _Rope_iterator<_CharT,_Alloc>& __y) {
2285 return !(__y < __x);
2288 template <class _CharT, class _Alloc>
2289 inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
2290 const _Rope_iterator<_CharT,_Alloc>& __y) {
2291 return !(__x < __y);
2294 template <class _CharT, class _Alloc>
2295 inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2296 const _Rope_iterator<_CharT,_Alloc>& __y) {
2297 return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2300 template <class _CharT, class _Alloc>
2301 inline _Rope_iterator<_CharT,_Alloc>
2302 operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2304 return _Rope_iterator<_CharT,_Alloc>(
2305 __x._M_root_rope, __x._M_current_pos - __n);
2308 template <class _CharT, class _Alloc>
2309 inline _Rope_iterator<_CharT,_Alloc>
2310 operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2312 return _Rope_iterator<_CharT,_Alloc>(
2313 __x._M_root_rope, __x._M_current_pos + __n);
2316 template <class _CharT, class _Alloc>
2317 inline _Rope_iterator<_CharT,_Alloc>
2318 operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2319 return _Rope_iterator<_CharT,_Alloc>(
2320 __x._M_root_rope, __x._M_current_pos + __n);
2323 template <class _CharT, class _Alloc>
2326 operator+ (const rope<_CharT,_Alloc>& __left,
2327 const rope<_CharT,_Alloc>& __right)
2329 __stl_assert(__left.get_allocator() == __right.get_allocator());
2330 return rope<_CharT,_Alloc>(
2331 rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
2332 // Inlining this should make it possible to keep __left and
2333 // __right in registers.
2336 template <class _CharT, class _Alloc>
2338 rope<_CharT,_Alloc>&
2339 operator+= (rope<_CharT,_Alloc>& __left,
2340 const rope<_CharT,_Alloc>& __right)
2342 __left.append(__right);
2346 template <class _CharT, class _Alloc>
2349 operator+ (const rope<_CharT,_Alloc>& __left,
2350 const _CharT* __right) {
2351 size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2352 return rope<_CharT,_Alloc>(
2353 rope<_CharT,_Alloc>::_S_concat_char_iter(
2354 __left._M_tree_ptr, __right, __rlen));
2357 template <class _CharT, class _Alloc>
2359 rope<_CharT,_Alloc>&
2360 operator+= (rope<_CharT,_Alloc>& __left,
2361 const _CharT* __right) {
2362 __left.append(__right);
2366 template <class _CharT, class _Alloc>
2369 operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
2370 return rope<_CharT,_Alloc>(
2371 rope<_CharT,_Alloc>::_S_concat_char_iter(
2372 __left._M_tree_ptr, &__right, 1));
2375 template <class _CharT, class _Alloc>
2377 rope<_CharT,_Alloc>&
2378 operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
2379 __left.append(__right);
2383 template <class _CharT, class _Alloc>
2385 operator< (const rope<_CharT,_Alloc>& __left,
2386 const rope<_CharT,_Alloc>& __right) {
2387 return __left.compare(__right) < 0;
2390 template <class _CharT, class _Alloc>
2392 operator== (const rope<_CharT,_Alloc>& __left,
2393 const rope<_CharT,_Alloc>& __right) {
2394 return __left.compare(__right) == 0;
2397 template <class _CharT, class _Alloc>
2398 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2399 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2400 return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
2403 template <class _CharT, class _Alloc>
2405 operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2406 return !(__x == __y);
2409 template <class _CharT, class _Alloc>
2411 operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2415 template <class _CharT, class _Alloc>
2417 operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2418 return !(__y < __x);
2421 template <class _CharT, class _Alloc>
2423 operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2424 return !(__x < __y);
2427 template <class _CharT, class _Alloc>
2428 inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2429 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2430 return !(__x == __y);
// Stream insertion for ropes; declared here, defined in the
// implementation header included at the end of this file.
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<<
                                        (basic_ostream<_CharT, _Traits>& __o,
                                         const rope<_CharT, _Alloc>& __r);
2438 typedef rope<char> crope;
2439 typedef rope<wchar_t> wrope;
2441 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2443 return __c.mutable_reference_at(__i);
2446 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2448 return __c.mutable_reference_at(__i);
2451 template <class _CharT, class _Alloc>
2452 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
2456 // Hash functions should probably be revisited later:
2457 template<> struct hash<crope>
2459 size_t operator()(const crope& __str) const
2461 size_t __size = __str.size();
2463 if (0 == __size) return 0;
2464 return 13*__str[0] + 5*__str[__size - 1] + __size;
2469 template<> struct hash<wrope>
2471 size_t operator()(const wrope& __str) const
2473 size_t __size = __str.size();
2475 if (0 == __size) return 0;
2476 return 13*__str[0] + 5*__str[__size - 1] + __size;
2482 # include <ext/ropeimpl.h>
2484 # endif /* __SGI_STL_INTERNAL_ROPE_H */