 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

// rope<_CharT,_Alloc> is a sequence of _CharT.
// Ropes appear to be mutable, but update operations
// really copy enough of the data structure to leave the original
// valid.  Thus ropes can be logically copied by just copying
// pointers.

#ifndef __SGI_STL_INTERNAL_ROPE_H
# define __SGI_STL_INTERNAL_ROPE_H
# ifdef __GC
#   define __GC_CONST const
# else
#   define __GC_CONST   // constant except for deallocation
# endif

# ifdef __STL_SGI_THREADS

#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
// The _S_eos function is used for those functions that
// convert to/from C-like strings to detect the end of the string.

// The end-of-C-string character.
// This is what the draft standard says it should be.
template <class _CharT>
inline _CharT _S_eos(_CharT*) { return _CharT(); }
// Test for basic character types.
// For basic character types, leaves have a trailing eos character.
template <class _CharT>
inline bool _S_is_basic_char_type(_CharT*) { return false; }
template <class _CharT>
inline bool _S_is_one_byte_char_type(_CharT*) { return false; }

inline bool _S_is_basic_char_type(char*) { return true; }
inline bool _S_is_one_byte_char_type(char*) { return true; }
inline bool _S_is_basic_char_type(wchar_t*) { return true; }

// Store an eos iff _CharT is a basic character type.
// Do not reference _S_eos if it isn't.
template <class _CharT>
inline void _S_cond_store_eos(_CharT&) {}

inline void _S_cond_store_eos(char& __c) { __c = 0; }
inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
// char_producers are logically functions that generate a section of
// a string.  These can be converted to ropes.  The resulting rope
// invokes the char_producer on demand.  This allows, for example,
// files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
public:
    virtual ~char_producer() {};
    virtual void operator()(size_t __start_pos, size_t __len,
                            _CharT* __buffer) = 0;
        // Buffer should really be an arbitrary output iterator.
        // That way we could flatten directly into an ostream, etc.
        // This is thoroughly impossible, since iterator types don't
        // have runtime descriptions.
};
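
// Example (illustrative sketch only, not part of this header): a
// char_producer that yields a constant fill character on demand.  The
// class and member names below are hypothetical.
//
//     template <class _CharT>
//     class fill_char_producer : public char_producer<_CharT> {
//         _CharT _M_fill;
//     public:
//         fill_char_producer(_CharT __fill) : _M_fill(__fill) {}
//         virtual void operator()(size_t /* __start_pos */, size_t __len,
//                                 _CharT* __buffer) {
//             for (size_t __i = 0; __i < __len; ++__i)
//                 __buffer[__i] = _M_fill;
//         }
//     };
//
// A rope built from such a producer materializes characters only when a
// section of the rope is actually examined.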
// Sequence must provide an append operation that appends an
// array to the sequence.  Sequence buffers are useful only if
// appending an entire array is cheaper than appending element by element.
// This is true for many string representations.
// This should perhaps inherit from ostream<sequence::value_type>
// and be implemented correspondingly, so that they can be used
// for formatted output.  For the sake of portability, we don't do this yet.
// For now, sequence buffers behave as output iterators.  But they also
// behave a little like basic_ostringstream<sequence::value_type> and a
// little like containers.

template<class _Sequence, size_t _Buf_sz = 100
#   if defined(__sgi) && !defined(__GNUC__)
#       define __TYPEDEF_WORKAROUND
        ,class _V = typename _Sequence::value_type
#   endif
        >
// The 3rd parameter works around a common compiler bug.
class sequence_buffer : public output_iterator {
public:
#   ifndef __TYPEDEF_WORKAROUND
    typedef typename _Sequence::value_type value_type;
#   else
    typedef _V value_type;
#   endif
protected:
    _Sequence* _M_prefix;
    value_type _M_buffer[_Buf_sz];
    size_t     _M_buf_count;
public:
    void flush() {
        _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
        _M_buf_count = 0;
    }
    ~sequence_buffer() { flush(); }
    sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
    sequence_buffer(const sequence_buffer& __x) {
        _M_prefix = __x._M_prefix;
        _M_buf_count = __x._M_buf_count;
        copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
    }
    sequence_buffer(sequence_buffer& __x) {
        __x.flush();
        _M_prefix = __x._M_prefix;
        _M_buf_count = 0;
    }
    sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
    sequence_buffer& operator= (sequence_buffer& __x) {
        __x.flush();
        _M_prefix = __x._M_prefix;
        _M_buf_count = 0;
        return *this;
    }
    sequence_buffer& operator= (const sequence_buffer& __x) {
        _M_prefix = __x._M_prefix;
        _M_buf_count = __x._M_buf_count;
        copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
        return *this;
    }
    void push_back(value_type __x)
    {
        if (_M_buf_count < _Buf_sz) {
            _M_buffer[_M_buf_count] = __x;
            ++_M_buf_count;
        } else {
            flush();
            _M_buffer[0] = __x;
            _M_buf_count = 1;
        }
    }
    void append(value_type* __s, size_t __len)
    {
        if (__len + _M_buf_count <= _Buf_sz) {
            size_t __i = _M_buf_count;
            size_t __j = 0;
            for (; __j < __len; __i++, __j++) {
                _M_buffer[__i] = __s[__j];
            }
            _M_buf_count += __len;
        } else if (0 == _M_buf_count) {
            _M_prefix->append(__s, __s + __len);
        } else {
            flush();
            append(__s, __len);
        }
    }
    sequence_buffer& write(value_type* __s, size_t __len)
    {
        append(__s, __len);
        return *this;
    }
    sequence_buffer& put(value_type __x)
    {
        push_back(__x);
        return *this;
    }
    sequence_buffer& operator=(const value_type& __rhs)
    {
        push_back(__rhs);
        return *this;
    }
    sequence_buffer& operator*() { return *this; }
    sequence_buffer& operator++() { return *this; }
    sequence_buffer& operator++(int) { return *this; }
};
// The following should be treated as private, at least for now.
template<class _CharT>
class _Rope_char_consumer {
public:
    // If we had member templates, these should not be virtual.
    // For now we need to use run-time parametrization where
    // compile-time would do.  Hence this should all be private
    // for now.
    // The symmetry with char_producer is accidental and temporary.
    virtual ~_Rope_char_consumer() {};
    virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
};
// What follows should really be local to rope.  Unfortunately,
// that doesn't work, since it makes it impossible to define generic
// equality on rope iterators.  According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
template<class _CharT, class _Alloc=__STL_DEFAULT_ALLOCATOR(_CharT)> class rope;
template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
template<class _CharT, class _Alloc> class _Rope_iterator;
template<class _CharT, class _Alloc> class _Rope_const_iterator;
template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
// The internal data structure for representing a rope.  This is
// private to the implementation.  A rope is really just a pointer
// to one of these.
//
// A few basic functions for manipulating this data structure
// are members of _RopeRep.  Most of the more complex algorithms
// are implemented as rope members.
//
// Some of the static member functions of _RopeRep have identically
// named functions in rope that simply invoke the _RopeRep versions.
//
// A macro to introduce various allocation and deallocation functions
// These need to be defined differently depending on whether or not
// we are using standard conforming allocators, and whether the allocator
// instances have real state.  Thus this macro is invoked repeatedly
// with different definitions of __ROPE_DEFINE_ALLOC.
// __ROPE_DEFINE_ALLOC(type,name) defines
//   type * name_allocate(size_t) and
//   void name_deallocate(type *, size_t)
// Both functions may or may not be static.
#define __ROPE_DEFINE_ALLOCS(__a) \
        __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
        typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
        __ROPE_DEFINE_ALLOC(__C,_C) \
        typedef _Rope_RopeLeaf<_CharT,__a> __L; \
        __ROPE_DEFINE_ALLOC(__L,_L) \
        typedef _Rope_RopeFunction<_CharT,__a> __F; \
        __ROPE_DEFINE_ALLOC(__F,_F) \
        typedef _Rope_RopeSubstring<_CharT,__a> __S; \
        __ROPE_DEFINE_ALLOC(__S,_S)
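
// For illustration only: in the instance-carrying definition of
// __ROPE_DEFINE_ALLOC given further below, __ROPE_DEFINE_ALLOC(_CharT,_Data)
// expands to roughly
//
//     typedef typename
//       _Alloc_traits<_CharT,_Allocator>::allocator_type _DataAllocator;
//     _CharT* _Data_allocate(size_t __n)
//       { return _DataAllocator(_M_data_allocator).allocate(__n); }
//     void _Data_deallocate(_CharT* __p, size_t __n)
//       { _DataAllocator(_M_data_allocator).deallocate(__p, __n); }
//
// i.e. one allocator typedef plus a matching allocate/deallocate pair for
// each node type listed above.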
// Internal rope nodes potentially store a copy of the allocator
// instance used to allocate them.  This is mostly redundant.
// But the alternative would be to pass allocator instances around
// in some form to nearly all internal functions, since any pointer
// assignment may result in a zero reference count and thus require
// deallocation.

// The _Rope_rep_base class encapsulates
// the differences between SGI-style allocators and standard-conforming
// allocators.
#ifdef __STL_USE_STD_ALLOCATORS

#define __STATIC_IF_SGI_ALLOC  /* not static */

// Base class for ordinary allocators.
template <class _CharT, class _Allocator, bool _IsStatic>
class _Rope_rep_alloc_base {
public:
    typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
            allocator_type;
    allocator_type get_allocator() const { return _M_data_allocator; }
    _Rope_rep_alloc_base(size_t __size, const allocator_type& __a)
        : _M_size(__size), _M_data_allocator(__a) {}
    size_t _M_size;     // This is here only to avoid wasting space
                        // for an otherwise empty base class.

    allocator_type _M_data_allocator;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        /*static*/ _Tp * __name##_allocate(size_t __n) \
          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
        void __name##_deallocate(_Tp* __p, size_t __n) \
          { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator);
# undef __ROPE_DEFINE_ALLOC
};
// Specialization for allocators that have the property that we don't
// actually have to store an allocator object.
template <class _CharT, class _Allocator>
class _Rope_rep_alloc_base<_CharT,_Allocator,true> {
public:
    typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
            allocator_type;
    allocator_type get_allocator() const { return allocator_type(); }
    _Rope_rep_alloc_base(size_t __size, const allocator_type&)

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        static _Tp* __name##_allocate(size_t __n) \
          { return __name##Alloc::allocate(__n); } \
        void __name##_deallocate(_Tp *__p, size_t __n) \
          { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator);
# undef __ROPE_DEFINE_ALLOC
};
template <class _CharT, class _Alloc>
struct _Rope_rep_base
    : public _Rope_rep_alloc_base<_CharT,_Alloc,
                                  _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
{
    typedef _Rope_rep_alloc_base<_CharT,_Alloc,
                                 _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
            _Base;
    typedef typename _Base::allocator_type allocator_type;
    _Rope_rep_base(size_t __size, const allocator_type& __a)
        : _Base(__size, __a) {}
};
#else /* !__STL_USE_STD_ALLOCATORS */

#define __STATIC_IF_SGI_ALLOC static

template <class _CharT, class _Alloc>
class _Rope_rep_base {
public:
    typedef _Alloc allocator_type;
    static allocator_type get_allocator() { return allocator_type(); }
    _Rope_rep_base(size_t __size, const allocator_type&) : _M_size(__size) {}

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
        static _Tp* __name##_allocate(size_t __n) \
            { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_Tp* __p, size_t __n) \
            { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Alloc);
# undef __ROPE_DEFINE_ALLOC
};

#endif /* __STL_USE_STD_ALLOCATORS */
template<class _CharT, class _Alloc>
struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc> {
    enum { _S_max_rope_depth = 45 };
    enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
    bool _M_is_balanced:8;
    unsigned char _M_depth;
    __GC_CONST _CharT* _M_c_string;
                        /* Flattened version of string, if needed.  */
                        /* If it's not 0, then the memory is owned  */
                        /* by this node.                            */
                        /* In the case of a leaf, this may point to */
                        /* the same memory as the data field.       */
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
                  allocator_type __a)
        : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
          _M_tag(__t), _M_depth(__d), _M_is_balanced(__b), _M_c_string(0)
    {
        _M_init_refcount_lock();
    }
# if defined(__STL_WIN32THREADS)
    long _M_refcount;   // InterlockedIncrement wants a long *
    // We count references from rope instances
    // and references from other rope nodes.  We
    // do not count const_iterator references.
    // Iterator references are counted so that rope modifications
    // can be detected after the fact.
    // Generally function results are counted, i.e.
    // a pointer returned by a function is included at the
    // point at which the pointer is returned.
    // The recipient should decrement the count if the
    // result is not needed.
    // Generally function arguments are not reflected
    // in the reference count.  The callee should increment
    // the count before saving the argument someplace that
    // will outlive the call.
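
    // For example (illustration only): a helper that returns a counted
    // _RopeRep* follows the convention above --
    //
    //     // _RopeRep* __result = _S_substring(__base, __i, __j);
    //     // ... use __result ...
    //     // _S_unref(__result);   // caller drops the count it received
    //
    // while a callee that stores one of its arguments must _S_ref() it
    // first, since arguments arrive uncounted.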
# ifdef __STL_SGI_THREADS
    // Reference counting with multiple threads and no
    // hardware or thread package support is pretty awful.
    // Mutexes are normally too expensive.
    // We'll assume a COMPARE_AND_SWAP(destp, __old, new)
    // operation, which might be cheaper.
#   if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
#       define __add_and_fetch(l,v) add_then_test((unsigned long*)l,v)
#   endif
    void _M_init_refcount_lock() {}
    void _M_incr_refcount ()
        { __add_and_fetch(&_M_refcount, 1); }
    size_t _M_decr_refcount ()
        { return __add_and_fetch(&_M_refcount, (size_t)(-1)); }
# elif defined(__STL_WIN32THREADS)
    void _M_init_refcount_lock() {}
    void _M_incr_refcount ()
        { InterlockedIncrement(&_M_refcount); }
    size_t _M_decr_refcount ()
        { return InterlockedDecrement(&_M_refcount); }
# elif defined(__STL_PTHREADS)
    // This should be portable, but performance is expected
    // to be quite awful.  This really needs platform specific
    // code.
    pthread_mutex_t _M_refcount_lock;
    void _M_init_refcount_lock()
        { pthread_mutex_init(&_M_refcount_lock, 0); }
    void _M_incr_refcount ()
    {
        pthread_mutex_lock(&_M_refcount_lock);
        ++_M_refcount;
        pthread_mutex_unlock(&_M_refcount_lock);
    }
    size_t _M_decr_refcount ()
    {
        size_t __result;
        pthread_mutex_lock(&_M_refcount_lock);
        __result = --_M_refcount;
        pthread_mutex_unlock(&_M_refcount_lock);
        return __result;
    }
# else
    void _M_init_refcount_lock() {}
    void _M_incr_refcount ()
        { ++_M_refcount; }
    size_t _M_decr_refcount ()
        { return --_M_refcount; }
# endif
    void _M_incr_refcount () {}

# ifdef __STL_USE_STD_ALLOCATORS
    static void _S_free_string(__GC_CONST _CharT*, size_t __len,
                               allocator_type __a);
#   define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
# else
    static void _S_free_string(__GC_CONST _CharT*, size_t __len);
#   define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l);
# endif
    // Deallocate data section of a leaf.
    // This shouldn't be a member function.
    // But it's hard to do anything else at the
    // moment, because it's templatized w.r.t.
    // an allocator.
    // Does nothing if __GC is defined.
    void _M_free_c_string();
    void _M_free_tree();
                        // Deallocate t.  Assumes t is not 0.
    void _M_unref_nonnil()
    {
        if (0 == _M_decr_refcount()) _M_free_tree();
    }
    static void _S_unref(_Rope_RopeRep* __t)
    {
        if (0 != __t) {
            __t->_M_unref_nonnil();
        }
    }
    static void _S_ref(_Rope_RopeRep* __t)
    {
        if (0 != __t) __t->_M_incr_refcount();
    }
    static void _S_free_if_unref(_Rope_RopeRep* __t)
    {
        if (0 != __t && 0 == __t->_M_refcount) __t->_M_free_tree();
    }

    void _M_unref_nonnil() {}
    void _M_ref_nonnil() {}
    static void _S_unref(_Rope_RopeRep*) {}
    static void _S_ref(_Rope_RopeRep*) {}
    static void _S_free_if_unref(_Rope_RopeRep*) {}
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
    // Apparently needed by VC++
    // The data fields of leaves are allocated with some
    // extra space, to accommodate future growth and for basic
    // character types, to hold a trailing eos character.
    enum { _S_alloc_granularity = 8 };
    static size_t _S_rounded_up_size(size_t __n) {
        size_t __size_with_eos;
        if (_S_is_basic_char_type((_CharT*)0)) {
            __size_with_eos = __n + 1;
        } else {
            __size_with_eos = __n;
        }
#       ifdef __GC
            return __size_with_eos;
#       else
            // Allow slop for in-place expansion.
            return (__size_with_eos + _S_alloc_granularity-1)
                        &~ (_S_alloc_granularity-1);
#       endif
    }
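    // Worked example (illustrative): for char (a basic character type)
    // in the non-__GC build, _S_rounded_up_size(10) reserves one slot
    // for the eos (11) and rounds up to the allocation granularity of 8,
    // yielding (11 + 8 - 1) & ~(8 - 1) == 16.  For a non-basic _CharT,
    // 10 rounds directly up to 16.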
    __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated.    */
                                /* The allocated size is            */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it      */
                                /* doesn't matter.                  */
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
        : _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a),
          _M_data(__d)
    {
        __stl_assert(__size > 0);
        if (_S_is_basic_char_type((_CharT*)0)) {
            // already eos terminated.
            _M_c_string = __d;
        }
    }
    // The constructor assumes that d has been allocated with
    // the proper allocator and the properly padded size.
    // In contrast, the destructor deallocates the data:
    ~_Rope_RopeLeaf() {
        if (_M_data != _M_c_string) {
            _M_free_c_string();
        }
        __STL_FREE_STRING(_M_data, _M_size, get_allocator());
    }
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
    _Rope_RopeRep<_CharT,_Alloc>* _M_left;
    _Rope_RopeRep<_CharT,_Alloc>* _M_right;
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
                            _Rope_RopeRep<_CharT,_Alloc>* __r,
                            allocator_type __a)
        : _Rope_RopeRep<_CharT,_Alloc>(
              _S_concat, max(__l->_M_depth, __r->_M_depth) + 1, false,
              __l->_M_size + __r->_M_size, __a),
          _M_left(__l), _M_right(__r)
    {}
    ~_Rope_RopeConcatenation() {
        _M_left->_M_unref_nonnil();
        _M_right->_M_unref_nonnil();
    }
template<class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
    char_producer<_CharT>* _M_fn;
    bool _M_delete_when_done;   // Char_producer is owned by the
                                // rope and should be explicitly
                                // deleted when the rope becomes
                                // inaccessible.
    // In the GC case, we either register the rope for
    // finalization, or not.  Thus the field is unnecessary;
    // the information is stored in the collector data structures.
    // We do need a finalization procedure to be invoked by the
    // collector.
    static void _S_fn_finalization_proc(void * __tree, void *) {
        delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
    }
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
                       bool __d, allocator_type __a)
        : _Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a),
          _M_fn(__f)
        , _M_delete_when_done(__d)
    {
        __stl_assert(__size > 0);
        GC_REGISTER_FINALIZER(
            this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
    }
    ~_Rope_RopeFunction() {
        if (_M_delete_when_done) {
            delete _M_fn;
        }
    }
// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes
// or ropes with a functional representation that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
                             public char_producer<_CharT> {
    // XXX this whole class should be rewritten.
    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
    virtual void operator()(size_t __start_pos, size_t __req_len,
                            _CharT* __buffer) {
        switch(_M_base->_M_tag) {

                char_producer<_CharT>* __fn =
                    ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
                __stl_assert(__start_pos + __req_len <= _M_size);
                __stl_assert(_M_start + _M_size <= _M_base->_M_size);
                (*__fn)(__start_pos + _M_start, __req_len, __buffer);

                __GC_CONST _CharT* __s =
                    ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
                uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
                                     __buffer);
    }
    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
    _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                        size_t __l, allocator_type __a)
        : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
          _M_base(__b)
    {
        __stl_assert(__l > 0);
        __stl_assert(__s + __l <= __b->_M_size);
        _M_base->_M_ref_nonnil();
        _M_tag = _S_substringfn;
    }
    virtual ~_Rope_RopeSubstring()
    {
        _M_base->_M_unref_nonnil();
        // _M_free_c_string();  -- done by parent class
    }
// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers.  Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
// is raised.  It is the caller's responsibility to
// adjust reference counts when these pointers are initialized
// or assigned to.  (This convention significantly reduces
// the number of potentially expensive reference count
// updates.)
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
    _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
    ~_Rope_self_destruct_ptr()
      { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef __STL_USE_EXCEPTIONS
    _Rope_self_destruct_ptr() : _M_ptr(0) {};
#   else
    _Rope_self_destruct_ptr() {};
#   endif
    _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
    _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
    _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
    operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
    _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
        { _M_ptr = __x; return *this; }
};
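
// Example (illustration only): a typical use inside the implementation --
//
//     // _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
//     // ... code that may throw ...
//     // __left's node is unref'ed automatically whether or not an
//     // exception propagates out of the enclosing block.
//
// This is the pattern used by the static replace() helper near the end
// of the rope class below.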
// Dereferencing a nonconst iterator has to return something
// that behaves almost like a reference.  It's not possible to
// return an actual reference since assignment requires extra
// work.  And we would get into the same problems as with the
// CD2 version of basic_string.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
    friend class rope<_CharT,_Alloc>;
    friend class _Rope_iterator<_CharT,_Alloc>;
    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
#   ifdef __GC
    typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
#   else
    typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
#   endif
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    typedef rope<_CharT,_Alloc> _My_rope;
    size_t _M_pos;
    _CharT _M_current;
    bool _M_current_valid;
    _My_rope* _M_root;     // The whole rope.
public:
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
        _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
    _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x) :
        _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
        // Don't preserve cache if the reference can outlive the
        // expression.  We claim that's not possible without calling
        // a copy constructor or generating reference to a proxy
        // reference.  We declare the latter to have undefined semantics.
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
                         _CharT __c) :
        _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
    inline operator _CharT () const;
    _Rope_char_ref_proxy& operator= (_CharT __c);
    _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
    _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
        return operator=((_CharT)__c);
    }
};
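
// Example (illustrative sketch only; how a nonconst iterator is obtained
// is not shown in this excerpt): dereferencing a nonconst rope iterator
// yields one of these proxies rather than a _CharT&, so
//
//     rope<char> __r("abc");
//     rope<char>::iterator __i = /* a nonconst iterator into __r */;
//     *__i = 'x';       // assignment goes through _Rope_char_ref_proxy,
//                       // copying just enough of the tree so that other
//                       // logical copies of __r remain valid
//     char __c = *__i;  // read back via operator _CharT()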
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
    _CharT __tmp = __a;
    __a = __b;
    __b = __tmp;
}
#else
// There is no really acceptable way to handle this.  The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes.  We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
        _CharT __tmp = __a; \
        __a = __b; \
        __b = __tmp; \
    }

_ROPE_SWAP_SPECIALIZATION(char,__STL_DEFAULT_ALLOCATOR(char))
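
// A user instantiating rope on another character type can provide the
// corresponding specialization the same way, for example (illustrative):
//
//     _ROPE_SWAP_SPECIALIZATION(wchar_t, __STL_DEFAULT_ALLOCATOR(wchar_t))
//
// Without such a specialization, the generic swap template does not do
// the right thing for proxy references, as the comment above explains.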
#endif /* !__STL_FUNCTION_TMPL_PARTIAL_ORDER */

template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
    // XXX this class should be rewritten.
    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
    size_t _M_pos;
    rope<_CharT,_Alloc>* _M_root;     // The whole rope.
public:
    _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
        : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
        : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy() {}
    _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
        __stl_assert(0 == __x);
    }
    _Rope_char_ptr_proxy&
    operator= (const _Rope_char_ptr_proxy& __x) {
        _M_pos = __x._M_pos;
        _M_root = __x._M_root;
        return *this;
    }
    friend bool operator== __STL_NULL_TMPL_ARGS
        (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
         const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
    _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
    }
};
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
// When we run out of cache, we have to reconstruct the iterator
// value.
// Pointers from iterators are not included in reference counts.
// Iterators are assumed to be thread private.  Ropes can
// be shared.
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma set woff 1375
#endif

template<class _CharT, class _Alloc>
class _Rope_iterator_base
  : public random_access_iterator<_CharT, ptrdiff_t> {
    friend class rope<_CharT,_Alloc>;
public:
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    // Borland doesn't want this to be protected.
    enum { _S_path_cache_len = 4 }; // Must be <= 9.
    enum { _S_iterator_buf_len = 15 };
    size_t _M_current_pos;
    _RopeRep* _M_root;     // The whole rope.
    size_t _M_leaf_pos;    // Starting position for current leaf
    __GC_CONST _CharT* _M_buf_start;
                           // Buffer possibly
                           // containing current char.
    __GC_CONST _CharT* _M_buf_ptr;
                           // Pointer to current char in buffer.
                           // != 0 ==> buffer valid.
    __GC_CONST _CharT* _M_buf_end;
                           // One past last valid char in buffer.
    // What follows is the path cache.  We go out of our
    // way to make this compact.
    // Path_end contains the bottom section of the path from
    // the root to the current leaf.
    const _RopeRep* _M_path_end[_S_path_cache_len];
    int _M_leaf_index;     // Last valid pos in path_end;
                           // _M_path_end[0] ... _M_path_end[leaf_index-1]
                           // point to concatenation nodes.
    unsigned char _M_path_directions;
                           // (path_directions >> __i) & 1 is 1
                           // iff we got from _M_path_end[leaf_index - __i - 1]
                           // to _M_path_end[leaf_index - __i] by going to the
                           // right.  Assumes path_cache_len <= 9.
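                           // Worked example (illustrative): with
                           // _M_leaf_index == 3 and _M_path_directions == 3
                           // (binary 011), the cached walk went left from
                           // _M_path_end[0] to _M_path_end[1] (bit 2 clear),
                           // right from _M_path_end[1] to _M_path_end[2]
                           // (bit 1 set), and right from _M_path_end[2] to
                           // the leaf at _M_path_end[3] (bit 0 set).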
    _CharT _M_tmp_buf[_S_iterator_buf_len];
                        // Short buffer for surrounding chars.
                        // This is useful primarily for
                        // RopeFunctions.  We put the buffer
                        // here to avoid locking in the
                        // multithreaded case.
    // The cached path is generally assumed to be valid
    // only if the buffer is valid.
    static void _S_setbuf(_Rope_iterator_base& __x);
                        // Set buffer contents given
                        // path cache.
    static void _S_setcache(_Rope_iterator_base& __x);
                        // Set buffer contents and
                        // path cache.
    static void _S_setcache_for_incr(_Rope_iterator_base& __x);
                        // As above, but assumes path
                        // cache is valid for previous posn.
    _Rope_iterator_base() {}
    _Rope_iterator_base(_RopeRep* __root, size_t __pos)
        : _M_root(__root), _M_current_pos(__pos), _M_buf_ptr(0) {}
    void _M_incr(size_t __n);
    void _M_decr(size_t __n);
public:
    size_t index() const { return _M_current_pos; }
    _Rope_iterator_base(const _Rope_iterator_base& __x) {
        if (0 != __x._M_buf_ptr) {
            *this = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
    }
};

template<class _CharT, class _Alloc> class _Rope_iterator;
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
    _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
        _Rope_iterator_base<_CharT,_Alloc>(
            const_cast<_RopeRep*>(__root), __pos)
        // Only nonconst iterators modify root ref count
    {}
    typedef _CharT reference;   // Really a value.  Returning a reference
                                // would be a mess, since it would have
                                // to be included in refcount.
    typedef const _CharT* pointer;

public:
    _Rope_const_iterator() {};
    _Rope_const_iterator(const _Rope_const_iterator& __x) :
        _Rope_iterator_base<_CharT,_Alloc>(__x) { }
    _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
    _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
        _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
    _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
        if (0 != __x._M_buf_ptr) {
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
        return *this;
    }
    reference operator*() {
        if (0 == _M_buf_ptr) _S_setcache(*this);
        return *_M_buf_ptr;
    }
    _Rope_const_iterator& operator++() {
        __GC_CONST _CharT* __next;
        if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
            _M_buf_ptr = __next;
            ++_M_current_pos;
        } else {
            _M_incr(1);
        }
        return *this;
    }
    _Rope_const_iterator& operator+=(ptrdiff_t __n) {
    _Rope_const_iterator& operator--() {
    _Rope_const_iterator& operator-=(ptrdiff_t __n) {
    _Rope_const_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
        // This makes a subsequent dereference expensive.
        // Perhaps we should instead copy the iterator
        // if it has a valid cache?
    }
    _Rope_const_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
    }
    friend _Rope_const_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (ptrdiff_t __n,
         const _Rope_const_iterator<_CharT,_Alloc>& __x);
    reference operator[](size_t __n) {
        return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
    }
    friend bool operator== __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);
    friend bool operator< __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);
    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);
};
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
    rope<_CharT,_Alloc>* _M_root_rope;
        // root is treated as a cached version of this,
        // and is used to detect changes to the underlying
        // rope.
        // Root is included in the reference count.
        // This is necessary so that we can detect changes reliably.
        // Unfortunately, it requires careful bookkeeping for the
        // nonGC case.
    _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
        : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
          _M_root_rope(__r)
        { _RopeRep::_S_ref(_M_root); }
public:
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;

    rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
    _Rope_iterator() {
        _M_root = 0;  // Needed for reference counting.
    }
    _Rope_iterator(const _Rope_iterator& __x) :
        _Rope_iterator_base<_CharT,_Alloc>(__x) {
        _M_root_rope = __x._M_root_rope;
        _RopeRep::_S_ref(_M_root);
    }
    _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
    ~_Rope_iterator() {
        _RopeRep::_S_unref(_M_root);
    }
    _Rope_iterator& operator= (const _Rope_iterator& __x) {
        _RopeRep* __old = _M_root;
        _RopeRep::_S_ref(__x._M_root);
        if (0 != __x._M_buf_ptr) {
            _M_root_rope = __x._M_root_rope;
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_root_rope = __x._M_root_rope;
            _M_buf_ptr = 0;
        }
        _RopeRep::_S_unref(__old);
        return *this;
    }
    reference operator*() {
        if (0 == _M_buf_ptr) {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
                _M_root_rope, _M_current_pos);
        } else {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
                _M_root_rope, _M_current_pos, *_M_buf_ptr);
        }
    }
    _Rope_iterator& operator++() {
    _Rope_iterator& operator+=(difference_type __n) {
    _Rope_iterator& operator--() {
    _Rope_iterator& operator-=(difference_type __n) {
    _Rope_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    _Rope_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    reference operator[](ptrdiff_t __n) {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(
            _M_root_rope, _M_current_pos + __n);
    }
    friend bool operator== __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend bool operator< __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);
    friend _Rope_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);
    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
        (ptrdiff_t __n,
         const _Rope_iterator<_CharT,_Alloc>& __x);
};
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma reset woff 1375
#endif

// The rope base class encapsulates
// the differences between SGI-style allocators and standard-conforming
// allocators.
#ifdef __STL_USE_STD_ALLOCATORS

// Base class for ordinary allocators.
template <class _CharT, class _Allocator, bool _IsStatic>
class _Rope_alloc_base {
public:
    typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
    typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
            allocator_type;
    allocator_type get_allocator() const { return _M_data_allocator; }
    _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
        : _M_tree_ptr(__t), _M_data_allocator(__a) {}
    _Rope_alloc_base(const allocator_type& __a)
        : _M_data_allocator(__a) {}
protected:
    // The only data members of a rope:
    allocator_type _M_data_allocator;
    _RopeRep* _M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        _Tp* __name##_allocate(size_t __n) const \
          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
        void __name##_deallocate(_Tp *__p, size_t __n) const \
          { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};
// Specialization for allocators that have the property that we don't
// actually have to store an allocator object.
template <class _CharT, class _Allocator>
class _Rope_alloc_base<_CharT,_Allocator,true> {
public:
    typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
    typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
            allocator_type;
    allocator_type get_allocator() const { return allocator_type(); }
    _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
        : _M_tree_ptr(__t) {}
    _Rope_alloc_base(const allocator_type&) {}
protected:
    // The only data member of a rope:
    _RopeRep *_M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        static _Tp* __name##_allocate(size_t __n) \
          { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_Tp *__p, size_t __n) \
          { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};
template <class _CharT, class _Alloc>
class _Rope_base
    : public _Rope_alloc_base<_CharT,_Alloc,
                              _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
{
public:
    typedef _Rope_alloc_base<_CharT,_Alloc,
                             _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
            _Base;
    typedef typename _Base::allocator_type allocator_type;
    _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
    _Rope_base(const allocator_type& __a) : _Base(__a) {}
};

#else /* !__STL_USE_STD_ALLOCATORS */
1236 template <class _CharT
, class _Alloc
>
1239 typedef _Rope_RopeRep
<_CharT
, _Alloc
> _RopeRep
;
1240 typedef _Alloc allocator_type
;
1241 static allocator_type
get_allocator() { return allocator_type(); }
1242 _Rope_base(_RopeRep
* __t
, const allocator_type
&) : _M_tree_ptr(__t
) {}
1243 _Rope_base(const allocator_type
&) {}
1246 // The only data member of a rope:
1247 _RopeRep
* _M_tree_ptr
;
1249 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1250 typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
1251 static _Tp* __name##_allocate(size_t __n) \
1252 { return __name##Alloc::allocate(__n); } \
1253 static void __name##_deallocate(_Tp *__p, size_t __n) \
1254 { __name##Alloc::deallocate(__p, __n); }
1255 __ROPE_DEFINE_ALLOCS(_Alloc
)
1256 # undef __ROPE_DEFINE_ALLOC
1259 #endif /* __STL_USE_STD_ALLOCATORS */
template <class _CharT, class _Alloc>
class rope : public _Rope_base<_CharT,_Alloc> {
public:
    typedef _CharT value_type;
    typedef ptrdiff_t difference_type;
    typedef size_t size_type;
    typedef _CharT const_reference;
    typedef const _CharT* const_pointer;
    typedef _Rope_iterator<_CharT,_Alloc> iterator;
    typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
    typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
    typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;

    friend class _Rope_iterator<_CharT,_Alloc>;
    friend class _Rope_const_iterator<_CharT,_Alloc>;
    friend struct _Rope_RopeRep<_CharT,_Alloc>;
    friend class _Rope_iterator_base<_CharT,_Alloc>;
    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
    friend struct _Rope_RopeSubstring<_CharT,_Alloc>;

protected:
    typedef _Rope_base<_CharT,_Alloc> _Base;
    typedef typename _Base::allocator_type allocator_type;
#   ifdef __STL_USE_NAMESPACES
    using _Base::_M_tree_ptr;
#   endif
    typedef __GC_CONST _CharT* _Cstrptr;
#   ifdef __STL_SGI_THREADS
    static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
#       if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
            return (_Cstrptr) test_and_set((unsigned long*)__p,
                                           (unsigned long)__q);
#       else
            return (_Cstrptr) __test_and_set((unsigned long*)__p,
                                             (unsigned long)__q);
#       endif
    }
#   elif defined(__STL_WIN32THREADS)
    static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
        return (_Cstrptr) InterlockedExchange(
                              (LPLONG)__p, (LONG)__q);
    }
#   elif defined(__STL_PTHREADS)
    // This should be portable, but performance is expected
    // to be quite awful.  This really needs platform specific
    // code.
    static pthread_mutex_t _S_swap_lock;
    static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
        pthread_mutex_lock(&_S_swap_lock);
        _Cstrptr __result = *__p;
        *__p = __q;
        pthread_mutex_unlock(&_S_swap_lock);
        return __result;
    }
#   else
    static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
        _Cstrptr __result = *__p;
        *__p = __q;
        return __result;
    }
#   endif

    static _CharT _S_empty_c_str[1];

    static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
    enum { _S_copy_max = 23 };
        // For strings shorter than _S_copy_max, we copy to
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;

    // Retrieve a character at the indicated position.
    static _CharT _S_fetch(_RopeRep* __r, size_type __pos);

    // Obtain a pointer to the character at the indicated position.
    // The pointer can be used to change the character.
    // If such a pointer cannot be produced, as is frequently the
    // case, 0 is returned instead.
    // (Returns nonzero only if all nodes in the path have a refcount
    // of 1.)
    static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);

    static bool _S_apply_to_pieces(
                // should be template parameter
                _Rope_char_consumer<_CharT>& __c,
                const _RopeRep* __r,
                size_t __begin, size_t __end);
                // begin and end are assumed to be in range.
#   ifndef __GC
    static void _S_unref(_RopeRep* __t)
    {
        _RopeRep::_S_unref(__t);
    }
    static void _S_ref(_RopeRep* __t)
    {
        _RopeRep::_S_ref(__t);
    }
#   else /* __GC */
    static void _S_unref(_RopeRep*) {}
    static void _S_ref(_RopeRep*) {}
#   endif

#   ifdef __GC
    typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
#   else
    typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
#   endif
    // Result is counted in refcount.
    static _RopeRep* _S_substring(_RopeRep* __base,
                                  size_t __start, size_t __endp1);

    static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
                                         const _CharT* __iter, size_t __slen);
        // Concatenate rope and char ptr, copying __s.
        // Should really take an arbitrary iterator.
        // Result is counted in refcount.
    static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
                                               const _CharT* __iter,
                                               size_t __slen)
        // As above, but one reference to __r is about to be
        // destroyed.  Thus the pieces may be recycled if all
        // relevant reference counts are 1.
        // We can't really do anything since refcounts are unavailable.
        { return _S_concat_char_iter(__r, __iter, __slen); }

    static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
        // General concatenation on _RopeRep.  Result
        // has refcount of 1.  Adjusts argument refcounts.

public:
    void apply_to_pieces( size_t __begin, size_t __end,
                          _Rope_char_consumer<_CharT>& __c) const {
        _S_apply_to_pieces(__c, _M_tree_ptr, __begin, __end);
    }
protected:
    static size_t _S_rounded_up_size(size_t __n) {
        return _RopeLeaf::_S_rounded_up_size(__n);
    }

    static size_t _S_allocated_capacity(size_t __n) {
        if (_S_is_basic_char_type((_CharT*)0)) {
            return _S_rounded_up_size(__n) - 1;
        } else {
            return _S_rounded_up_size(__n);
        }
    }

    // Allocate and construct a RopeLeaf using the supplied allocator
    // Takes ownership of s instead of copying.
    static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
                                      size_t __size, allocator_type __a)
    {
#       ifdef __STL_USE_STD_ALLOCATORS
          _RopeLeaf* __space = _LAllocator(__a).allocate(1);
#       else
          _RopeLeaf* __space = _L_allocate(1);
#       endif
        return new(__space) _RopeLeaf(__s, __size, __a);
    }

    static _RopeConcatenation* _S_new_RopeConcatenation(
                _RopeRep* __left, _RopeRep* __right,
                allocator_type __a)
    {
#       ifdef __STL_USE_STD_ALLOCATORS
          _RopeConcatenation* __space = _CAllocator(__a).allocate(1);
#       else
          _RopeConcatenation* __space = _C_allocate(1);
#       endif
        return new(__space) _RopeConcatenation(__left, __right, __a);
    }

    static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                size_t __size, bool __d, allocator_type __a)
    {
#       ifdef __STL_USE_STD_ALLOCATORS
          _RopeFunction* __space = _FAllocator(__a).allocate(1);
#       else
          _RopeFunction* __space = _F_allocate(1);
#       endif
        return new(__space) _RopeFunction(__f, __size, __d, __a);
    }

    static _RopeSubstring* _S_new_RopeSubstring(
                _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                size_t __l, allocator_type __a)
    {
#       ifdef __STL_USE_STD_ALLOCATORS
          _RopeSubstring* __space = _SAllocator(__a).allocate(1);
#       else
          _RopeSubstring* __space = _S_allocate(1);
#       endif
        return new(__space) _RopeSubstring(__b, __s, __l, __a);
    }

#   ifdef __STL_USE_STD_ALLOCATORS
    _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                                 size_t __size,
                                                 allocator_type __a)
#     define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
                _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
#   else
    _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr2(const _CharT* __s,
                                                  size_t __size)
#     define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
                _S_RopeLeaf_from_unowned_char_ptr2(__s, __size)
#   endif
    {
        if (0 == __size) return 0;
#       ifdef __STL_USE_STD_ALLOCATORS
          _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
#       else
          _CharT* __buf = _Data_allocate(_S_rounded_up_size(__size));
          allocator_type __a = allocator_type();
#       endif

        uninitialized_copy_n(__s, __size, __buf);
        _S_cond_store_eos(__buf[__size]);
        __STL_TRY {
            return _S_new_RopeLeaf(__buf, __size, __a);
        }
        __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, __size, __a))
    }
    // Concatenation of nonempty strings.
    // Always builds a concatenation node.
    // Rebalances if the result is too deep.
    // Result has refcount 1.
    // Does not increment left and right ref counts even though
    // they are referenced.
    static _RopeRep*
    _S_tree_concat(_RopeRep* __left, _RopeRep* __right);

    // Concatenation helper functions
    static _RopeLeaf*
    _S_leaf_concat_char_iter(_RopeLeaf* __r,
                             const _CharT* __iter, size_t __slen);
        // Concatenate by copying leaf.
        // should take an arbitrary iterator
        // result has refcount 1.
    static _RopeLeaf* _S_destr_leaf_concat_char_iter
                        (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
        // A version that potentially clobbers __r if __r->_M_refcount == 1.

    // A helper function for exponentiating strings.
    // This uses a nonstandard refcount convention.
    // The result has refcount 0.
    struct _Concat_fn
        : public binary_function<rope<_CharT,_Alloc>,
                                 rope<_CharT,_Alloc>,
                                 rope<_CharT,_Alloc> > {
        rope operator() (const rope& __x, const rope& __y) {
            return __x + __y;
        }
    };

    // Needed by the call to "power" used to build ropes
    // consisting of n copies of a character.
    friend rope identity_element(_Concat_fn)
        { return rope<_CharT,_Alloc>(); }
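
    // Illustrative note: with _Concat_fn as the monoid operation and the
    // empty rope as its identity element, rope(size_t __n, _CharT __c)
    // can be built as power(rope(__c), __n, _Concat_fn()), i.e. by
    // repeated doubling in O(log __n) concatenations.  (The actual
    // constructor definition lies outside this excerpt.)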
    static size_t _S_char_ptr_len(const _CharT* __s);
        // slightly generalized strlen

    rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
        : _Base(__t,__a) { }

    // Copy __r to the _CharT buffer.
    // Returns __buffer + __r->_M_size.
    // Assumes that buffer is uninitialized.
    static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);

    // Again, with explicit starting position and length.
    // Assumes that buffer is uninitialized.
    static _CharT* _S_flatten(_RopeRep* __r,
                              size_t __start, size_t __len,
                              _CharT* __buffer);

    static const unsigned long
      _S_min_len[_RopeRep::_S_max_rope_depth + 1];
    static bool _S_is_balanced(_RopeRep* __r)
        { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }

    static bool _S_is_almost_balanced(_RopeRep* __r)
        { return (__r->_M_depth == 0 ||
                  __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }

    static bool _S_is_roughly_balanced(_RopeRep* __r)
        { return (__r->_M_depth <= 1 ||
                  __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
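
    // Illustrative note: _S_min_len holds the minimum size a balanced
    // rope of each depth may have (a Fibonacci-like sequence), so the
    // three predicates above ask whether __r is at least as long as a
    // balanced, almost balanced (depth - 1), or roughly balanced
    // (depth - 2) tree of its depth.  E.g. a concatenation node whose
    // size falls below _S_min_len[_M_depth] reports false from
    // _S_is_balanced and becomes a candidate for rebalancing.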
    // Assumes the result is not empty.
    static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
                                                _RopeRep* __right)
    {
        _RopeRep* __result = _S_concat(__left, __right);
        if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
        return __result;
    }

    // The basic rebalancing operation.  Logically copies the
    // rope.  The result has refcount of 1.  The client will
    // usually decrement the reference count of __r.
    // The result is within height 2 of balanced by the above
    // definition.
    static _RopeRep* _S_balance(_RopeRep* __r);
    // Add all unbalanced subtrees to the forest of balanced trees.
    // Used only by balance.
    static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);

    // Add __r to forest, assuming __r is already balanced.
    static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);

    // Print to stdout, exposing structure
    static void _S_dump(_RopeRep* __r, int __indent = 0);

    // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
    static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
public:
    bool empty() const { return 0 == _M_tree_ptr; }

    // Comparison member function.  This is public only for those
    // clients that need a ternary comparison.  Others
    // should use the comparison operators below.
    int compare(const rope& __y) const {
        return _S_compare(_M_tree_ptr, __y._M_tree_ptr);
    }

    rope(const _CharT* __s, const allocator_type& __a = allocator_type())
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
                                                 __a), __a)
    { }

    rope(const _CharT* __s, size_t __len,
         const allocator_type& __a = allocator_type())
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
    { }

    // Should perhaps be templatized with respect to the iterator type
    // and use Sequence_buffer.  (It should perhaps use sequence_buffer
    // instead.)
    rope(const _CharT *__s, const _CharT *__e,
         const allocator_type& __a = allocator_type())
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
    { }

    rope(const const_iterator& __s, const const_iterator& __e,
         const allocator_type& __a = allocator_type())
        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
                             __e._M_current_pos), __a)
    { }

    rope(const iterator& __s, const iterator& __e,
         const allocator_type& __a = allocator_type())
        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
                             __e._M_current_pos), __a)
    { }

    rope(_CharT __c, const allocator_type& __a = allocator_type())
    {
        _CharT* __buf = _Data_allocate(_S_rounded_up_size(1));

        construct(__buf, __c);
        __STL_TRY {
            _M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
        }
        __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, 1, __a))
    }

    rope(size_t __n, _CharT __c,
         const allocator_type& __a = allocator_type());

    rope(const allocator_type& __a = allocator_type())

    // Construct a rope from a function that can compute its members
    rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
         const allocator_type& __a = allocator_type())
    {
        _M_tree_ptr = (0 == __len) ?
            0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
    }

    rope(const rope& __x, const allocator_type& __a = allocator_type())
        : _Base(__x._M_tree_ptr, __a)
    {
        _S_ref(_M_tree_ptr);
    }

    ~rope()
    {
        _S_unref(_M_tree_ptr);
    }

    rope& operator=(const rope& __x)
    {
        _RopeRep* __old = _M_tree_ptr;
#       ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(get_allocator() == __x.get_allocator());
#       endif
        _M_tree_ptr = __x._M_tree_ptr;
        _S_ref(_M_tree_ptr);
        _S_unref(__old);
        return *this;
    }

    void push_back(_CharT __x)
    {
        _RopeRep* __old = _M_tree_ptr;
        _M_tree_ptr = _S_concat_char_iter(_M_tree_ptr, &__x, 1);
        _S_unref(__old);
    }

    void pop_back()
    {
        _RopeRep* __old = _M_tree_ptr;
        _M_tree_ptr =
            _S_substring(_M_tree_ptr, 0, _M_tree_ptr->_M_size - 1);
        _S_unref(__old);
    }

    _CharT back() const
    {
        return _S_fetch(_M_tree_ptr, _M_tree_ptr->_M_size - 1);
    }

    void push_front(_CharT __x)
    {
        _RopeRep* __old = _M_tree_ptr;
        _RopeRep* __left =
            __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
        __STL_TRY {
            _M_tree_ptr = _S_concat(__left, _M_tree_ptr);
            _S_unref(__old);
            _S_unref(__left);
        }
        __STL_UNWIND(_S_unref(__left))
    }

    void pop_front()
    {
        _RopeRep* __old = _M_tree_ptr;
        _M_tree_ptr = _S_substring(_M_tree_ptr, 1, _M_tree_ptr->_M_size);
        _S_unref(__old);
    }

    _CharT front() const
    {
        return _S_fetch(_M_tree_ptr, 0);
    }

    void balance()
    {
        _RopeRep* __old = _M_tree_ptr;
        _M_tree_ptr = _S_balance(_M_tree_ptr);
        _S_unref(__old);
    }

    void copy(_CharT* __buffer) const {
        destroy(__buffer, __buffer + size());
        _S_flatten(_M_tree_ptr, __buffer);
    }
    // This is the copy function from the standard, but
    // with the arguments reordered to make it consistent with the
    // rest of the interface.
    // Note that this is guaranteed not to compile if the draft standard
    // order is assumed.
    size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
    {
        size_t __size = size();
        size_t __len = (__pos + __n > __size ? __size - __pos : __n);

        destroy(__buffer, __buffer + __len);
        _S_flatten(_M_tree_ptr, __pos, __len, __buffer);
        return __len;
    }
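
    // Example (illustrative only): copying a 5-character window that
    // starts at position 2 of some rope<char> __r into a raw buffer --
    //
    //     // char __chunk[5];
    //     // size_t __got = __r.copy(2, 5, __chunk);  // (pos, n, buffer)
    //     // // __got may be < 5 if the rope ends early
    //
    // Note the argument order differs from basic_string::copy(buf, n, pos),
    // as the comment above warns.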
    // Print to stdout, exposing structure.  May be useful for
    // performance debugging.
    void dump() {
        _S_dump(_M_tree_ptr);
    }

    // Convert to 0 terminated string in new allocated memory.
    // Embedded 0s in the input do not terminate the copy.
    const _CharT* c_str() const;
    // As above, but also use the flattened representation as
    // the new rope representation.
    const _CharT* replace_with_c_str();
    // Reclaim memory for the c_str generated flattened string.
    // Intentionally undocumented, since it's hard to say when this
    // is safe for multiple threads.
    void delete_c_str () {
        if (0 == _M_tree_ptr) return;
        if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag &&
            ((_RopeLeaf*)_M_tree_ptr)->_M_data ==
                _M_tree_ptr->_M_c_string) {
            // Representation shared
            return;
        }
        _M_tree_ptr->_M_free_c_string();
        _M_tree_ptr->_M_c_string = 0;
    }

    _CharT operator[] (size_type __pos) const {
        return _S_fetch(_M_tree_ptr, __pos);
    }

    _CharT at(size_type __pos) const {
        // if (__pos >= size()) throw out_of_range;  // XXX
        return (*this)[__pos];
    }

    const_iterator begin() const {
        return(const_iterator(_M_tree_ptr, 0));
    }

    // An easy way to get a const iterator from a non-const container.
    const_iterator const_begin() const {
        return(const_iterator(_M_tree_ptr, 0));
    }

    const_iterator end() const {
        return(const_iterator(_M_tree_ptr, size()));
    }

    const_iterator const_end() const {
        return(const_iterator(_M_tree_ptr, size()));
    }

    size_type size() const {
        return(0 == _M_tree_ptr ? 0 : _M_tree_ptr->_M_size);
    }

    size_type length() const {
        return size();
    }

    size_type max_size() const {
        return _S_min_len[_RopeRep::_S_max_rope_depth-1] - 1;
        // Guarantees that the result can be sufficiently
        // balanced.  Longer ropes will probably still work,
        // but it's harder to make guarantees.
    }
# ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
    typedef reverse_iterator<const_iterator> const_reverse_iterator;
# else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
    typedef reverse_iterator<const_iterator, value_type, const_reference,
                             difference_type>  const_reverse_iterator;
# endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */

    const_reverse_iterator rbegin() const {
        return const_reverse_iterator(end());
    }

    const_reverse_iterator const_rbegin() const {
        return const_reverse_iterator(end());
    }

    const_reverse_iterator rend() const {
        return const_reverse_iterator(begin());
    }

    const_reverse_iterator const_rend() const {
        return const_reverse_iterator(begin());
    }
    friend rope<_CharT,_Alloc>
    operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
                                    const rope<_CharT,_Alloc>& __right);

    friend rope<_CharT,_Alloc>
    operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
                                    const _CharT* __right);

    friend rope<_CharT,_Alloc>
    operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
                                    _CharT __right);

    // The symmetric cases are intentionally omitted, since they're presumed
    // to be less common, and we don't handle them as well.
    // The following should really be templatized.
    // The first argument should be an input iterator or
    // forward iterator with value_type _CharT.
    rope& append(const _CharT* __iter, size_t __n) {
        _RopeRep* __result =
            _S_destr_concat_char_iter(_M_tree_ptr, __iter, __n);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
        return *this;
    }

    rope& append(const _CharT* __c_string) {
        size_t __len = _S_char_ptr_len(__c_string);
        append(__c_string, __len);
        return *this;
    }

    rope& append(const _CharT* __s, const _CharT* __e) {
        _RopeRep* __result =
            _S_destr_concat_char_iter(_M_tree_ptr, __s, __e - __s);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
        return *this;
    }

    rope& append(const_iterator __s, const_iterator __e) {
        __stl_assert(__s._M_root == __e._M_root);
# ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(get_allocator() == __s._M_root->get_allocator());
# endif
        _Self_destruct_ptr __appendee(_S_substring(
            __s._M_root, __s._M_current_pos, __e._M_current_pos));
        _RopeRep* __result =
            _S_concat(_M_tree_ptr, (_RopeRep*)__appendee);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
        return *this;
    }
    rope& append(_CharT __c) {
        _RopeRep* __result =
            _S_destr_concat_char_iter(_M_tree_ptr, &__c, 1);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
        return *this;
    }

    rope& append() { return append(_CharT()); }  // XXX why?

    rope& append(const rope& __y) {
# ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(__y.get_allocator() == get_allocator());
# endif
        _RopeRep* __result = _S_concat(_M_tree_ptr, __y._M_tree_ptr);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
        return *this;
    }

    rope& append(size_t __n, _CharT __c) {
        rope<_CharT,_Alloc> __last(__n, __c);
        return append(__last);
    }

    void swap(rope& __b) {
# ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(get_allocator() == __b.get_allocator());
# endif
        _RopeRep* __tmp = _M_tree_ptr;
        _M_tree_ptr = __b._M_tree_ptr;
        __b._M_tree_ptr = __tmp;
    }
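    // Usage sketch for the append family (illustrative only; assumes the
    // crope typedef defined near the end of this header):
    //
    //   crope __r("foo");
    //   __r.append("bar");     // "foobar"
    //   __r.append(3, '!');    // "foobar!!!"
    //   __r.append('?');       // "foobar!!!?"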
    // Result is included in refcount.
    static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                             size_t __pos2, _RopeRep* __r) {
        if (0 == __old) { _S_ref(__r); return __r; }
        _Self_destruct_ptr __left(
            _S_substring(__old, 0, __pos1));
        _Self_destruct_ptr __right(
            _S_substring(__old, __pos2, __old->_M_size));
        _RopeRep* __result;

# ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(__old->get_allocator() == __r->get_allocator());
# endif
        if (0 == __r) {
            __result = _S_concat(__left, __right);
        } else {
            _Self_destruct_ptr __left_result(_S_concat(__left, __r));
            __result = _S_concat(__left_result, __right);
        }
        return __result;
    }
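    // Worked example (illustrative only): if __old represents "abcdef",
    // __pos1 == 2, __pos2 == 4 and __r represents "XY", the result is
    // concat(concat("ab", "XY"), "ef"), i.e. "abXYef", returned with its
    // own reference already counted.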
    void insert(size_t __p, const rope& __r) {
        _RopeRep* __result =
            replace(_M_tree_ptr, __p, __p, __r._M_tree_ptr);
# ifdef __STL_USE_STD_ALLOCATORS
        __stl_assert(get_allocator() == __r.get_allocator());
# endif
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
    }

    void insert(size_t __p, size_t __n, _CharT __c) {
        rope<_CharT,_Alloc> __r(__n, __c);
        insert(__p, __r);
    }

    void insert(size_t __p, const _CharT* __i, size_t __n) {
        _Self_destruct_ptr __left(_S_substring(_M_tree_ptr, 0, __p));
        _Self_destruct_ptr __right(_S_substring(_M_tree_ptr, __p, size()));
        _Self_destruct_ptr __left_result(
            _S_concat_char_iter(__left, __i, __n));
        _RopeRep* __result = _S_concat(__left_result, __right);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
    }

    void insert(size_t __p, const _CharT* __c_string) {
        insert(__p, __c_string, _S_char_ptr_len(__c_string));
    }

    void insert(size_t __p, _CharT __c) {
        insert(__p, &__c, 1);
    }

    void insert(size_t __p) {
        _CharT __c = _CharT();
        insert(__p, &__c, 1);
    }

    void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
        rope __r(__i, __j);
        insert(__p, __r);
    }

    void insert(size_t __p, const const_iterator& __i,
                            const const_iterator& __j) {
        rope __r(__i, __j);
        insert(__p, __r);
    }

    void insert(size_t __p, const iterator& __i,
                            const iterator& __j) {
        rope __r(__i, __j);
        insert(__p, __r);
    }
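    // Usage sketch (illustrative only; assumes the crope typedef defined
    // near the end of this header):
    //
    //   crope __r("abcdef");
    //   __r.insert(3, "XY");     // "abcXYdef"
    //   __r.insert(0, 2, '-');   // "--abcXYdef"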
    // (position, length) versions of replace operations:

    void replace(size_t __p, size_t __n, const rope& __r) {
        _RopeRep* __result =
            replace(_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
    }

    void replace(size_t __p, size_t __n,
                 const _CharT* __i, size_t __i_len) {
        rope __r(__i, __i_len);
        replace(__p, __n, __r);
    }

    void replace(size_t __p, size_t __n, _CharT __c) {
        rope __r(__c);
        replace(__p, __n, __r);
    }

    void replace(size_t __p, size_t __n, const _CharT* __c_string) {
        rope __r(__c_string);
        replace(__p, __n, __r);
    }

    void replace(size_t __p, size_t __n,
                 const _CharT* __i, const _CharT* __j) {
        rope __r(__i, __j);
        replace(__p, __n, __r);
    }

    void replace(size_t __p, size_t __n,
                 const const_iterator& __i, const const_iterator& __j) {
        rope __r(__i, __j);
        replace(__p, __n, __r);
    }

    void replace(size_t __p, size_t __n,
                 const iterator& __i, const iterator& __j) {
        rope __r(__i, __j);
        replace(__p, __n, __r);
    }

    // Single character variants:
    void replace(size_t __p, _CharT __c) {
        iterator __i(this, __p);
        *__i = __c;
    }

    void replace(size_t __p, const rope& __r) {
        replace(__p, 1, __r);
    }

    void replace(size_t __p, const _CharT* __i, size_t __i_len) {
        replace(__p, 1, __i, __i_len);
    }

    void replace(size_t __p, const _CharT* __c_string) {
        replace(__p, 1, __c_string);
    }

    void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
        replace(__p, 1, __i, __j);
    }

    void replace(size_t __p, const const_iterator& __i,
                             const const_iterator& __j) {
        replace(__p, 1, __i, __j);
    }

    void replace(size_t __p, const iterator& __i,
                             const iterator& __j) {
        replace(__p, 1, __i, __j);
    }
    // Erase, (position, size) variant.
    void erase(size_t __p, size_t __n) {
        _RopeRep* __result = replace(_M_tree_ptr, __p, __p + __n, 0);
        _S_unref(_M_tree_ptr);
        _M_tree_ptr = __result;
    }

    // Erase, single character
    void erase(size_t __p) {
        erase(__p, 1);
    }
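    // Usage sketch (illustrative only; assumes the crope typedef defined
    // near the end of this header):
    //
    //   crope __r("abcdef");
    //   __r.erase(1, 3);   // removes "bcd", leaving "aef"
    //   __r.erase(0);      // removes the leading character, leaving "ef"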
    // Insert, iterator variants.
    iterator insert(const iterator& __p, const rope& __r)
        { insert(__p.index(), __r); return __p; }
    iterator insert(const iterator& __p, size_t __n, _CharT __c)
        { insert(__p.index(), __n, __c); return __p; }
    iterator insert(const iterator& __p, _CharT __c)
        { insert(__p.index(), __c); return __p; }
    iterator insert(const iterator& __p)
        { insert(__p.index()); return __p; }
    iterator insert(const iterator& __p, const _CharT* c_string)
        { insert(__p.index(), c_string); return __p; }
    iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
        { insert(__p.index(), __i, __n); return __p; }
    iterator insert(const iterator& __p, const _CharT* __i,
                    const _CharT* __j)
        { insert(__p.index(), __i, __j); return __p; }
    iterator insert(const iterator& __p,
                    const const_iterator& __i, const const_iterator& __j)
        { insert(__p.index(), __i, __j); return __p; }
    iterator insert(const iterator& __p,
                    const iterator& __i, const iterator& __j)
        { insert(__p.index(), __i, __j); return __p; }
    // Replace, range variants.
    void replace(const iterator& __p, const iterator& __q,
                 const rope& __r)
        { replace(__p.index(), __q.index() - __p.index(), __r); }
    void replace(const iterator& __p, const iterator& __q, _CharT __c)
        { replace(__p.index(), __q.index() - __p.index(), __c); }
    void replace(const iterator& __p, const iterator& __q,
                 const _CharT* __c_string)
        { replace(__p.index(), __q.index() - __p.index(), __c_string); }
    void replace(const iterator& __p, const iterator& __q,
                 const _CharT* __i, size_t __n)
        { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
    void replace(const iterator& __p, const iterator& __q,
                 const _CharT* __i, const _CharT* __j)
        { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
    void replace(const iterator& __p, const iterator& __q,
                 const const_iterator& __i, const const_iterator& __j)
        { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
    void replace(const iterator& __p, const iterator& __q,
                 const iterator& __i, const iterator& __j)
        { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
    // Replace, iterator variants.
    void replace(const iterator& __p, const rope& __r)
        { replace(__p.index(), __r); }
    void replace(const iterator& __p, _CharT __c)
        { replace(__p.index(), __c); }
    void replace(const iterator& __p, const _CharT* __c_string)
        { replace(__p.index(), __c_string); }
    void replace(const iterator& __p, const _CharT* __i, size_t __n)
        { replace(__p.index(), __i, __n); }
    void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
        { replace(__p.index(), __i, __j); }
    void replace(const iterator& __p, const_iterator __i,
                 const_iterator __j)
        { replace(__p.index(), __i, __j); }
    void replace(const iterator& __p, iterator __i, iterator __j)
        { replace(__p.index(), __i, __j); }

    // Iterator and range variants of erase
    iterator erase(const iterator& __p, const iterator& __q) {
        size_t __p_index = __p.index();
        erase(__p_index, __q.index() - __p_index);
        return iterator(this, __p_index);
    }

    iterator erase(const iterator& __p) {
        size_t __p_index = __p.index();
        erase(__p_index, 1);
        return iterator(this, __p_index);
    }
    rope substr(size_t __start, size_t __len = 1) const {
        return rope<_CharT,_Alloc>(
            _S_substring(_M_tree_ptr, __start, __start + __len));
    }

    rope substr(iterator __start, iterator __end) const {
        return rope<_CharT,_Alloc>(
            _S_substring(_M_tree_ptr, __start.index(), __end.index()));
    }

    rope substr(iterator __start) const {
        size_t __pos = __start.index();
        return rope<_CharT,_Alloc>(
            _S_substring(_M_tree_ptr, __pos, __pos + 1));
    }

    rope substr(const_iterator __start, const_iterator __end) const {
        // This might eventually take advantage of the cache in the
        // iterator.
        return rope<_CharT,_Alloc>(
            _S_substring(_M_tree_ptr, __start.index(), __end.index()));
    }

    rope<_CharT,_Alloc> substr(const_iterator __start) {
        size_t __pos = __start.index();
        return rope<_CharT,_Alloc>(
            _S_substring(_M_tree_ptr, __pos, __pos + 1));
    }
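    // Usage sketch (illustrative only; assumes the crope typedef defined
    // near the end of this header).  Note that, unlike basic_string::substr,
    // the length argument defaults to 1:
    //
    //   crope __r("abcdef");
    //   crope __s = __r.substr(2, 3);   // "cde"
    //   crope __t = __r.substr(4);      // "e"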
    static const size_type npos;

    size_type find(_CharT __c, size_type __pos = 0) const;

    size_type find(_CharT* __s, size_type __pos = 0) const {
        size_type __result_pos;
        const_iterator __result = search(const_begin() + __pos, const_end(),
                                         __s, __s + _S_char_ptr_len(__s));
        __result_pos = __result.index();
# ifndef __STL_OLD_ROPE_SEMANTICS
        if (__result_pos == size()) __result_pos = npos;
# endif
        return __result_pos;
    }
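    // Usage sketch (illustrative only; assumes the crope typedef defined
    // near the end of this header and the default, non-old semantics above):
    //
    //   crope __r("abcabc");
    //   crope::size_type __i = __r.find('b');           // 1
    //   crope::size_type __j = __r.find((char*)"ca");   // 2
    //   crope::size_type __k = __r.find('z');           // crope::npos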
    iterator mutable_begin() {
        return(iterator(this, 0));
    }

    iterator mutable_end() {
        return(iterator(this, size()));
    }

# ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
    typedef reverse_iterator<iterator> reverse_iterator;
# else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
    typedef reverse_iterator<iterator, value_type, reference,
                             difference_type> reverse_iterator;
# endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */

    reverse_iterator mutable_rbegin() {
        return reverse_iterator(mutable_end());
    }

    reverse_iterator mutable_rend() {
        return reverse_iterator(mutable_begin());
    }

    reference mutable_reference_at(size_type __pos) {
        return reference(this, __pos);
    }

    reference operator[] (size_type __pos) {
        return _char_ref_proxy(this, __pos);
    }

    reference at(size_type __pos) {
        // if (__pos >= size()) throw out_of_range;  // XXX
        return (*this)[__pos];
    }

    void resize(size_type __n, _CharT __c) {}
    void resize(size_type __n) {}
    void reserve(size_type __res_arg = 0) {}

    size_type capacity() const {
        return max_size();
    }

    // Stuff below this line is dangerous because it's error prone.
    // I would really like to get rid of it.
    // copy function with funny arg ordering.
    size_type copy(_CharT* __buffer, size_type __n,
                   size_type __pos = 0) const {
        return copy(__pos, __n, __buffer);
    }

    iterator end() { return mutable_end(); }

    iterator begin() { return mutable_begin(); }

    reverse_iterator rend() { return mutable_rend(); }

    reverse_iterator rbegin() { return mutable_rbegin(); }

    const_iterator end() { return const_end(); }

    const_iterator begin() { return const_begin(); }

    const_reverse_iterator rend() { return const_rend(); }

    const_reverse_iterator rbegin() { return const_rbegin(); }
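    // Usage sketch (illustrative only; assumes the crope typedef defined
    // near the end of this header): writes go through a proxy reference, so
    // the shared representation is never modified in place.
    //
    //   crope __r("abc");
    //   __r.mutable_reference_at(0) = 'A';   // __r now represents "Abc"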
};

template <class _CharT, class _Alloc>
const rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
                        (size_type)(-1);
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
    return (__x._M_current_pos == __y._M_current_pos &&
            __x._M_root == __y._M_root);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y) {
    return (__x._M_current_pos < __y._M_current_pos);
}

template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y) {
    return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
}

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
    return _Rope_const_iterator<_CharT,_Alloc>(
        __x._M_root, __x._M_current_pos - __n);
}

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
    return _Rope_const_iterator<_CharT,_Alloc>(
        __x._M_root, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
    return _Rope_const_iterator<_CharT,_Alloc>(
        __x._M_root, __x._M_current_pos + __n);
}
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
    return (__x._M_current_pos == __y._M_current_pos &&
            __x._M_root_rope == __y._M_root_rope);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y) {
    return (__x._M_current_pos < __y._M_current_pos);
}

template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y) {
    return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
    return _Rope_iterator<_CharT,_Alloc>(
        __x._M_root_rope, __x._M_current_pos - __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
    return _Rope_iterator<_CharT,_Alloc>(
        __x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
    return _Rope_iterator<_CharT,_Alloc>(
        __x._M_root_rope, __x._M_current_pos + __n);
}
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right)
{
# ifdef __STL_USE_STD_ALLOCATORS
    __stl_assert(__left.get_allocator() == __right.get_allocator());
# endif
    return rope<_CharT,_Alloc>(
        rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
    // Inlining this should make it possible to keep __left and
    // __right in registers.
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right)
{
    __left.append(__right);
    return __left;
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
    size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
    return rope<_CharT,_Alloc>(
        rope<_CharT,_Alloc>::_S_concat_char_iter(
            __left._M_tree_ptr, __right, __rlen));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
    __left.append(__right);
    return __left;
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
    return rope<_CharT,_Alloc>(
        rope<_CharT,_Alloc>::_S_concat_char_iter(
            __left._M_tree_ptr, &__right, 1));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
    __left.append(__right);
    return __left;
}
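// Usage sketch (illustrative only; assumes the crope typedef defined below):
// concatenation builds a small new tree over the operands' shared
// representations, normally without copying the bulk of either side.
//
//   crope __a("hello, ");
//   crope __b("world");
//   crope __c = __a + __b;   // "hello, world"
//   __c += '!';              // "hello, world!"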
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
    return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
    return __left.compare(__right) == 0;
}

template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
    return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}

template<class _CharT, class _Alloc>
ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
typedef rope<char> crope;
typedef rope<wchar_t> wrope;

inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{
    return __c.mutable_reference_at(__i);
}

inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{
    return __c.mutable_reference_at(__i);
}
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER

template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
    __x.swap(__y);
}

#else

inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }

#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
// Hash functions should probably be revisited later:
__STL_TEMPLATE_NULL struct hash<crope>
{
    size_t operator()(const crope& __str) const
    {
        size_t __size = __str.size();

        if (0 == __size) return 0;
        return 13*__str[0] + 5*__str[__size - 1] + __size;
    }
};

__STL_TEMPLATE_NULL struct hash<wrope>
{
    size_t operator()(const wrope& __str) const
    {
        size_t __size = __str.size();

        if (0 == __size) return 0;
        return 13*__str[0] + 5*__str[__size - 1] + __size;
    }
};
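// Usage sketch (illustrative only; assumes the SGI <hash_map> extension is
// also available):
//
//   #include <hash_map>
//   hash_map<crope, int> __word_counts;
//   __word_counts[crope("abc")] = 1;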
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma reset woff 1174
#endif

# include <ropeimpl.h>

# endif /* __SGI_STL_INTERNAL_ROPE_H */