/*
 * Copyright (c) 1997-1998
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

// rope<_CharT,_Alloc> is a sequence of _CharT.
// Ropes appear to be mutable, but update operations
// really copy enough of the data structure to leave the original
// valid.  Thus ropes can be logically copied by just copying
// a pointer value.
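//
// Illustrative sketch (added by the editor, not part of the original header;
// assumes the standard SGI rope interface): because updates copy only the
// affected part of the tree, copying a rope is cheap and the copy shares
// structure with the original.
//
//     rope<char> __r("The quick brown fox");
//     rope<char> __r2 = __r;        // copies a pointer, bumps a refcount
//     __r2.push_back('.');          // __r is left unchanged
//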

#ifndef __SGI_STL_INTERNAL_ROPE_H
# define __SGI_STL_INTERNAL_ROPE_H

# ifdef __GC
#   define __GC_CONST const
# else
#   include <stl_threads.h>
#   define __GC_CONST   // constant except for deallocation
# endif
# ifdef __STL_SGI_THREADS
#    include <mutex.h>
# endif

namespace std
{

// The _S_eos function is used for those functions that
// convert to/from C-like strings to detect the end of the string.

// The end-of-C-string character.
// This is what the draft standard says it should be.
template <class _CharT>
inline _CharT _S_eos(_CharT*) { return _CharT(); }

// Test for basic character types.
// For basic character types, leaves have a trailing eos.
template <class _CharT>
inline bool _S_is_basic_char_type(_CharT*) { return false; }
template <class _CharT>
inline bool _S_is_one_byte_char_type(_CharT*) { return false; }

inline bool _S_is_basic_char_type(char*) { return true; }
inline bool _S_is_one_byte_char_type(char*) { return true; }
inline bool _S_is_basic_char_type(wchar_t*) { return true; }

// Store an eos iff _CharT is a basic character type.
// Do not reference _S_eos if it isn't.
template <class _CharT>
inline void _S_cond_store_eos(_CharT&) {}

inline void _S_cond_store_eos(char& __c) { __c = 0; }
inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }

// char_producers are logically functions that generate a section of
// a string.  These can be converted to ropes.  The resulting rope
// invokes the char_producer on demand.  This allows, for example,
// files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
    public:
        virtual ~char_producer() {};
        virtual void operator()(size_t __start_pos, size_t __len,
                                _CharT* __buffer) = 0;
        // Buffer should really be an arbitrary output iterator.
        // That way we could flatten directly into an ostream, etc.
        // This is thoroughly impossible, since iterator types don't
        // have runtime descriptions.
};
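//
// Illustrative sketch (added by the editor, not part of the original header):
// a char_producer that serves characters out of an in-memory buffer.  A
// file-backed producer would look the same, but would seek and read inside
// operator().  The class name is hypothetical; memcpy comes from <string.h>.
//
//     class __buffer_producer : public char_producer<char> {
//         const char* _M_s;
//       public:
//         __buffer_producer(const char* __s) : _M_s(__s) {}
//         virtual void operator()(size_t __start, size_t __len, char* __buf)
//           { memcpy(__buf, _M_s + __start, __len); }
//     };
//
//     // const char* __src = ...;   // must outlive the rope
//     // rope<char> __r(new __buffer_producer(__src), __len, true);
//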

// Sequence buffers:
//
// Sequence must provide an append operation that appends an
// array to the sequence.  Sequence buffers are useful only if
// appending an entire array is cheaper than appending element by element.
// This is true for many string representations.
// This should perhaps inherit from ostream
// and be implemented correspondingly, so that they can be used
// for formatted output.  For the sake of portability, we don't do this yet.
//
// For now, sequence buffers behave as output iterators.  But they also
// behave a little like basic_ostringstream and a
// little like containers.

template <class _Sequence, size_t _Buf_sz = 100>
class sequence_buffer : public output_iterator {
    public:
        typedef typename _Sequence::value_type value_type;
    protected:
        _Sequence* _M_prefix;
        value_type _M_buffer[_Buf_sz];
        size_t     _M_buf_count;
    public:
        void flush() {
            _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
            _M_buf_count = 0;
        }
        ~sequence_buffer() { flush(); }
        sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
        sequence_buffer(const sequence_buffer& __x) {
            _M_prefix = __x._M_prefix;
            _M_buf_count = __x._M_buf_count;
            copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
        }
        sequence_buffer(sequence_buffer& __x) {
            __x.flush();
            _M_prefix = __x._M_prefix;
            _M_buf_count = 0;
        }
        sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
        sequence_buffer& operator= (sequence_buffer& __x) {
            __x.flush();
            _M_prefix = __x._M_prefix;
            _M_buf_count = 0;
            return *this;
        }
        sequence_buffer& operator= (const sequence_buffer& __x) {
            _M_prefix = __x._M_prefix;
            _M_buf_count = __x._M_buf_count;
            copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
            return *this;
        }
        void push_back(value_type __x)
        {
            if (_M_buf_count < _Buf_sz) {
                _M_buffer[_M_buf_count] = __x;
                ++_M_buf_count;
            } else {
                flush();
                _M_buffer[0] = __x;
                _M_buf_count = 1;
            }
        }
        void append(value_type* __s, size_t __len)
        {
            if (__len + _M_buf_count <= _Buf_sz) {
                size_t __i = _M_buf_count;
                size_t __j = 0;
                for (; __j < __len; __i++, __j++) {
                    _M_buffer[__i] = __s[__j];
                }
                _M_buf_count += __len;
            } else if (0 == _M_buf_count) {
                _M_prefix->append(__s, __s + __len);
            } else {
                flush();
                append(__s, __len);
            }
        }
        sequence_buffer& write(value_type* __s, size_t __len)
        {
            append(__s, __len);
            return *this;
        }
        sequence_buffer& put(value_type __x)
        {
            push_back(__x);
            return *this;
        }
        sequence_buffer& operator=(const value_type& __rhs)
        {
            push_back(__rhs);
            return *this;
        }
        sequence_buffer& operator*() { return *this; }
        sequence_buffer& operator++() { return *this; }
        sequence_buffer& operator++(int) { return *this; }
};
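//
// Illustrative sketch (added by the editor, not part of the original header):
// a sequence_buffer batches single-character appends and hands them to the
// underlying sequence in chunks, so it can stand in wherever an output
// iterator is expected.
//
//     rope<char> __r;
//     sequence_buffer<rope<char> > __buf(__r);
//     const char* __msg = "hello";
//     copy(__msg, __msg + 5, __buf);   // buffered; flushed on destruction
//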

// The following should be treated as private, at least for now.
template<class _CharT>
class _Rope_char_consumer {
    public:
        // If we had member templates, these should not be virtual.
        // For now we need to use run-time parametrization where
        // compile-time would do.  Hence this should all be private
        // for now.
        // The symmetry with char_producer is accidental and temporary.
        virtual ~_Rope_char_consumer() {};
        virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
};

// First a lot of forward declarations.  The standard seems to require
// much stricter "declaration before use" than many of the implementations
// that preceded it.
template<class _CharT, class _Alloc = allocator<_CharT> > class rope;
template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
template<class _CharT, class _Alloc> class _Rope_iterator;
template<class _CharT, class _Alloc> class _Rope_const_iterator;
template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;

template<class _CharT, class _Alloc>
bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
_Rope_const_iterator<_CharT,_Alloc> operator-
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);

template<class _CharT, class _Alloc>
_Rope_const_iterator<_CharT,_Alloc> operator+
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);

template<class _CharT, class _Alloc>
_Rope_const_iterator<_CharT,_Alloc> operator+
        (ptrdiff_t __n,
         const _Rope_const_iterator<_CharT,_Alloc>& __x);

template<class _CharT, class _Alloc>
bool operator==
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
bool operator<
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
ptrdiff_t operator-
        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
         const _Rope_const_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
_Rope_iterator<_CharT,_Alloc> operator-
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);

template<class _CharT, class _Alloc>
_Rope_iterator<_CharT,_Alloc> operator+
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         ptrdiff_t __n);

template<class _CharT, class _Alloc>
_Rope_iterator<_CharT,_Alloc> operator+
        (ptrdiff_t __n,
         const _Rope_iterator<_CharT,_Alloc>& __x);

template<class _CharT, class _Alloc>
bool operator==
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
bool operator<
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
ptrdiff_t operator-
        (const _Rope_iterator<_CharT,_Alloc>& __x,
         const _Rope_iterator<_CharT,_Alloc>& __y);

template<class _CharT, class _Alloc>
rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
                               const rope<_CharT,_Alloc>& __right);

template<class _CharT, class _Alloc>
rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
                               const _CharT* __right);

template<class _CharT, class _Alloc>
rope<_CharT,_Alloc> operator+ (const rope<_CharT,_Alloc>& __left,
                               _CharT __right);

// Some helpers, so we can use power on ropes.
// See below for why this isn't local to the implementation.

// This uses a nonstandard refcount convention.
// The result has refcount 0.
template<class _CharT, class _Alloc>
struct _Rope_Concat_fn
       : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
                                     rope<_CharT,_Alloc> > {
        rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
                                const rope<_CharT,_Alloc>& __y) {
                    return __x + __y;
        }
};

template <class _CharT, class _Alloc>
inline
rope<_CharT,_Alloc>
identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
{
    return rope<_CharT,_Alloc>();
}
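//
// Illustrative sketch (added by the editor, not part of the original header):
// with the functor and identity_element above, the SGI power() extension can
// repeat a rope using O(log n) concatenations.  The explicit functor type
// spelled out here is the editor's assumption about how it would be invoked.
//
//     rope<char> __unit("-*");
//     rope<char> __ruler =
//         power(__unit, 40, _Rope_Concat_fn<char, allocator<char> >());
//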


//
// What follows should really be local to rope.  Unfortunately,
// that doesn't work, since it makes it impossible to define generic
// equality on rope iterators.  According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
//

//
// The internal data structure for representing a rope.  This is
// private to the implementation.  A rope is really just a pointer
// to one of these.
//
// A few basic functions for manipulating this data structure
// are members of _RopeRep.  Most of the more complex algorithms
// are implemented as rope members.
//
// Some of the static member functions of _RopeRep have identically
// named functions in rope that simply invoke the _RopeRep versions.
//
// A macro to introduce various allocation and deallocation functions
// These need to be defined differently depending on whether or not
// we are using standard conforming allocators, and whether the allocator
// instances have real state.  Thus this macro is invoked repeatedly
// with different definitions of __ROPE_DEFINE_ALLOC.
// __ROPE_DEFINE_ALLOC(type,name) defines
//   type * name_allocate(size_t) and
//   void name_deallocate(type *, size_t)
// Both functions may or may not be static.

#define __ROPE_DEFINE_ALLOCS(__a) \
        __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
        typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
        __ROPE_DEFINE_ALLOC(__C,_C) \
        typedef _Rope_RopeLeaf<_CharT,__a> __L; \
        __ROPE_DEFINE_ALLOC(__L,_L) \
        typedef _Rope_RopeFunction<_CharT,__a> __F; \
        __ROPE_DEFINE_ALLOC(__F,_F) \
        typedef _Rope_RopeSubstring<_CharT,__a> __S; \
        __ROPE_DEFINE_ALLOC(__S,_S)
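
// Illustrative expansion (added by the editor, not part of the original
// header): inside _Rope_rep_alloc_base below, __ROPE_DEFINE_ALLOC(_CharT,_Data)
// expands to roughly
//
//     typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
//             _DataAllocator;
//     _CharT* _Data_allocate(size_t __n)
//       { return _DataAllocator(_M_data_allocator).allocate(__n); }
//     void _Data_deallocate(_CharT* __p, size_t __n)
//       { _DataAllocator(_M_data_allocator).deallocate(__p, __n); }
//
// so each node type gets a matching pair of typed allocation and
// deallocation helpers.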

//  Internal rope nodes potentially store a copy of the allocator
//  instance used to allocate them.  This is mostly redundant.
//  But the alternative would be to pass allocator instances around
//  in some form to nearly all internal functions, since any pointer
//  assignment may result in a zero reference count and thus require
//  deallocation.
//  The _Rope_rep_base class encapsulates
//  the differences between SGI-style allocators and standard-conforming
//  allocators.

#define __STATIC_IF_SGI_ALLOC  /* not static */

// Base class for ordinary allocators.
template <class _CharT, class _Allocator, bool _IsStatic>
class _Rope_rep_alloc_base {
public:
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return _M_data_allocator; }
  _Rope_rep_alloc_base(size_t __size, const allocator_type& __a)
        : _M_size(__size), _M_data_allocator(__a) {}
  size_t _M_size;       // This is here only to avoid wasting space
                // for an otherwise empty base class.


protected:
    allocator_type _M_data_allocator;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        /*static*/ _Tp * __name##_allocate(size_t __n) \
          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
        void __name##_deallocate(_Tp* __p, size_t __n) \
          { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator);
# undef __ROPE_DEFINE_ALLOC
};

// Specialization for allocators that have the property that we don't
//  actually have to store an allocator object.
template <class _CharT, class _Allocator>
class _Rope_rep_alloc_base<_CharT,_Allocator,true> {
public:
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return allocator_type(); }
  _Rope_rep_alloc_base(size_t __size, const allocator_type&)
                : _M_size(__size) {}
  size_t _M_size;

protected:

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        static _Tp* __name##_allocate(size_t __n) \
                { return __name##Alloc::allocate(__n); } \
        void __name##_deallocate(_Tp *__p, size_t __n) \
                { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator);
# undef __ROPE_DEFINE_ALLOC
};

template <class _CharT, class _Alloc>
struct _Rope_rep_base
  : public _Rope_rep_alloc_base<_CharT,_Alloc,
                                _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
{
  typedef _Rope_rep_alloc_base<_CharT,_Alloc,
                               _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
          _Base;
  typedef typename _Base::allocator_type allocator_type;
  _Rope_rep_base(size_t __size, const allocator_type& __a)
    : _Base(__size, __a) {}
};


template<class _CharT, class _Alloc>
struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc>
# ifndef __GC
    , _Refcount_Base
# endif
{
    public:
    enum { _S_max_rope_depth = 45 };
    enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
    _Tag _M_tag:8;
    bool _M_is_balanced:8;
    unsigned char _M_depth;
    __GC_CONST _CharT* _M_c_string;
                        /* Flattened version of string, if needed.  */
                        /* typically 0.                             */
                        /* If it's not 0, then the memory is owned  */
                        /* by this node.                            */
                        /* In the case of a leaf, this may point to */
                        /* the same memory as the data field.       */
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;
    _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
                  allocator_type __a)
        : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
#         ifndef __GC
          _Refcount_Base(1),
#         endif
          _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0)
    { }
#   ifdef __GC
        void _M_incr () {}
#   endif
        static void _S_free_string(__GC_CONST _CharT*, size_t __len,
                                   allocator_type __a);
#       define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
                        // Deallocate data section of a leaf.
                        // This shouldn't be a member function.
                        // But it's hard to do anything else at the
                        // moment, because it's templatized w.r.t.
                        // an allocator.
                        // Does nothing if __GC is defined.
#   ifndef __GC
          void _M_free_c_string();
          void _M_free_tree();
                        // Deallocate t. Assumes t is not 0.
          void _M_unref_nonnil()
          {
              if (0 == _M_decr()) _M_free_tree();
          }
          void _M_ref_nonnil()
          {
              _M_incr();
          }
          static void _S_unref(_Rope_RopeRep* __t)
          {
              if (0 != __t) {
                  __t->_M_unref_nonnil();
              }
          }
          static void _S_ref(_Rope_RopeRep* __t)
          {
              if (0 != __t) __t->_M_incr();
          }
          static void _S_free_if_unref(_Rope_RopeRep* __t)
          {
              if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
          }
#   else /* __GC */
          void _M_unref_nonnil() {}
          void _M_ref_nonnil() {}
          static void _S_unref(_Rope_RopeRep*) {}
          static void _S_ref(_Rope_RopeRep*) {}
          static void _S_free_if_unref(_Rope_RopeRep*) {}
#   endif

};

template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    // Apparently needed by VC++
    // The data fields of leaves are allocated with some
    // extra space, to accommodate future growth and for basic
    // character types, to hold a trailing eos character.
    enum { _S_alloc_granularity = 8 };
    static size_t _S_rounded_up_size(size_t __n) {
        size_t __size_with_eos;

        if (_S_is_basic_char_type((_CharT*)0)) {
            __size_with_eos = __n + 1;
        } else {
            __size_with_eos = __n;
        }
#       ifdef __GC
           return __size_with_eos;
#       else
           // Allow slop for in-place expansion.
           return (__size_with_eos + _S_alloc_granularity-1)
                        &~ (_S_alloc_granularity-1);
#       endif
    }
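    // Illustrative worked example (added by the editor, not part of the
    // original header): for _CharT == char and __n == 10, __size_with_eos
    // is 11 (room for the eos), and rounding up to the 8-unit granularity
    // gives (11 + 7) & ~7 == 16.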
    __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;
    _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
        : _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a),
          _M_data(__d)
        {
        __stl_assert(__size > 0);
        if (_S_is_basic_char_type((_CharT *)0)) {
            // already eos terminated.
            _M_c_string = __d;
        }
    }
        // The constructor assumes that d has been allocated with
        // the proper allocator and the properly padded size.
        // In contrast, the destructor deallocates the data:
# ifndef __GC
    ~_Rope_RopeLeaf() {
        if (_M_data != _M_c_string) {
            _M_free_c_string();
        }
        __STL_FREE_STRING(_M_data, _M_size, get_allocator());
    }
# endif
};

template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    _Rope_RopeRep<_CharT,_Alloc>* _M_left;
    _Rope_RopeRep<_CharT,_Alloc>* _M_right;
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                        allocator_type;
    _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
                             _Rope_RopeRep<_CharT,_Alloc>* __r,
                             allocator_type __a)

      : _Rope_RopeRep<_CharT,_Alloc>(_S_concat,
                                     max(__l->_M_depth, __r->_M_depth) + 1,
                                     false,
                                     __l->_M_size + __r->_M_size, __a),
        _M_left(__l), _M_right(__r)
      {}
# ifndef __GC
    ~_Rope_RopeConcatenation() {
        _M_free_c_string();
        _M_left->_M_unref_nonnil();
        _M_right->_M_unref_nonnil();
    }
# endif
};

template<class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
  public:
    char_producer<_CharT>* _M_fn;
#   ifndef __GC
      bool _M_delete_when_done; // Char_producer is owned by the
                                // rope and should be explicitly
                                // deleted when the rope becomes
                                // inaccessible.
#   else
      // In the GC case, we either register the rope for
      // finalization, or not.  Thus the field is unnecessary;
      // the information is stored in the collector data structures.
      // We do need a finalization procedure to be invoked by the
      // collector.
      static void _S_fn_finalization_proc(void * __tree, void *) {
        delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
      }
#   endif
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
                                        allocator_type;
    _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
                        bool __d, allocator_type __a)
      : _Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a)
      , _M_fn(__f)
#       ifndef __GC
      , _M_delete_when_done(__d)
#       endif
    {
        __stl_assert(__size > 0);
#       ifdef __GC
            if (__d) {
                GC_REGISTER_FINALIZER(
                  this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
            }
#       endif
    }
# ifndef __GC
    ~_Rope_RopeFunction() {
          _M_free_c_string();
          if (_M_delete_when_done) {
              delete _M_fn;
          }
    }
# endif
};
// Substring results are usually represented using just
// concatenation nodes.  But in the case of very long flat ropes
// or ropes with a functional representation that isn't practical.
// In that case, we represent the result as a special case of
// RopeFunction, whose char_producer points back to the rope itself.
// In all cases except repeated substring operations and
// deallocation, we treat the result as a RopeFunction.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
                             public char_producer<_CharT> {
  public:
    // XXX this whole class should be rewritten.
    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
    size_t _M_start;
    virtual void operator()(size_t __start_pos, size_t __req_len,
                            _CharT* __buffer) {
        switch(_M_base->_M_tag) {
            case _S_function:
            case _S_substringfn:
              {
                char_producer<_CharT>* __fn =
                        ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
                __stl_assert(__start_pos + __req_len <= _M_size);
                __stl_assert(_M_start + _M_size <= _M_base->_M_size);
                (*__fn)(__start_pos + _M_start, __req_len, __buffer);
              }
              break;
            case _S_leaf:
              {
                __GC_CONST _CharT* __s =
                        ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
                uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
                                     __buffer);
              }
              break;
            default:
              __stl_assert(false);
        }
    }
    typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type
        allocator_type;
    _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                          size_t __l, allocator_type __a)
      : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
        char_producer<_CharT>(),
        _M_base(__b),
        _M_start(__s)
    {
        __stl_assert(__l > 0);
        __stl_assert(__s + __l <= __b->_M_size);
#       ifndef __GC
            _M_base->_M_ref_nonnil();
#       endif
        _M_tag = _S_substringfn;
    }
    virtual ~_Rope_RopeSubstring()
      {
#       ifndef __GC
          _M_base->_M_unref_nonnil();
          // _M_free_c_string();  -- done by parent class
#       endif
      }
};


// Self-destructing pointers to Rope_rep.
// These are not conventional smart pointers.  Their
// only purpose in life is to ensure that unref is called
// on the pointer either at normal exit or if an exception
// is raised.  It is the caller's responsibility to
// adjust reference counts when these pointers are initialized
// or assigned to.  (This convention significantly reduces
// the number of potentially expensive reference count
// updates.)
#ifndef __GC
  template<class _CharT, class _Alloc>
  struct _Rope_self_destruct_ptr {
    _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
    ~_Rope_self_destruct_ptr()
      { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef __STL_USE_EXCEPTIONS
        _Rope_self_destruct_ptr() : _M_ptr(0) {};
#   else
        _Rope_self_destruct_ptr() {};
#   endif
    _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
    _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
    _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
    operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
    _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
        { _M_ptr = __x; return *this; }
  };
#endif

// Dereferencing a nonconst iterator has to return something
// that behaves almost like a reference.  It's not possible to
// return an actual reference since assignment requires extra
// work.  And we would get into the same problems as with the
// CD2 version of basic_string.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
    friend class rope<_CharT,_Alloc>;
    friend class _Rope_iterator<_CharT,_Alloc>;
    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
#   ifdef __GC
        typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
#   else
        typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
#   endif
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
    typedef rope<_CharT,_Alloc> _My_rope;
    size_t _M_pos;
    _CharT _M_current;
    bool _M_current_valid;
    _My_rope* _M_root;     // The whole rope.
  public:
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p)
      :  _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
    _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x)
      : _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
        // Don't preserve cache if the reference can outlive the
        // expression.  We claim that's not possible without calling
        // a copy constructor or generating reference to a proxy
        // reference.  We declare the latter to have undefined semantics.
    _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
      : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
    inline operator _CharT () const;
    _Rope_char_ref_proxy& operator= (_CharT __c);
    _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
    _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
        return operator=((_CharT)__c);
    }
};
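//
// Illustrative sketch (added by the editor, not part of the original header;
// assumes the standard SGI rope interface): the proxy is what makes writes
// through a mutable iterator work on a shared, refcounted tree.
//
//     rope<char> __r("abc");
//     __r.mutable_begin()[1] = 'X';   // assignment goes through the proxy
//     char __c = __r[1];              // read yields a plain _CharT value
//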

template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
    _CharT __tmp = __a;
    __a = __b;
    __b = __tmp;
}

template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
    // XXX this class should be rewritten.
    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
    size_t _M_pos;
    rope<_CharT,_Alloc>* _M_root;     // The whole rope.
  public:
    _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
    _Rope_char_ptr_proxy() {}
    _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
        __stl_assert(0 == __x);
    }
    _Rope_char_ptr_proxy&
    operator= (const _Rope_char_ptr_proxy& __x) {
        _M_pos = __x._M_pos;
        _M_root = __x._M_root;
        return *this;
    }
    template<class _CharT2, class _Alloc2>
    friend bool operator== (const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __x,
                            const _Rope_char_ptr_proxy<_CharT2,_Alloc2>& __y);
    _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
    }
};


// Rope iterators:
// Unlike in the C version, we cache only part of the stack
// for rope iterators, since they must be efficiently copyable.
// When we run out of cache, we have to reconstruct the iterator
// value.
// Pointers from iterators are not included in reference counts.
// Iterators are assumed to be thread private.  Ropes can
// be shared.

template<class _CharT, class _Alloc>
class _Rope_iterator_base
  : public random_access_iterator<_CharT, ptrdiff_t> {
    friend class rope<_CharT,_Alloc>;
  public:
    typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround
    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
        // Borland doesn't want this to be protected.
  protected:
    enum { _S_path_cache_len = 4 }; // Must be <= 9.
    enum { _S_iterator_buf_len = 15 };
    size_t _M_current_pos;
    _RopeRep* _M_root;     // The whole rope.
    size_t _M_leaf_pos;    // Starting position for current leaf
    __GC_CONST _CharT* _M_buf_start;
                        // Buffer possibly
                        // containing current char.
    __GC_CONST _CharT* _M_buf_ptr;
                        // Pointer to current char in buffer.
                        // != 0 ==> buffer valid.
    __GC_CONST _CharT* _M_buf_end;
                        // One past the last valid char in buffer.
    // What follows is the path cache.  We go out of our
    // way to make this compact.
    // Path_end contains the bottom section of the path from
    // the root to the current leaf.
    const _RopeRep* _M_path_end[_S_path_cache_len];
    int _M_leaf_index;     // Last valid pos in path_end;
                        // _M_path_end[0] ... _M_path_end[leaf_index-1]
                        // point to concatenation nodes.
    unsigned char _M_path_directions;
                          // (path_directions >> __i) & 1 is 1
                          // iff we got from _M_path_end[leaf_index - __i - 1]
                          // to _M_path_end[leaf_index - __i] by going to the
                          // right. Assumes path_cache_len <= 9.
    _CharT _M_tmp_buf[_S_iterator_buf_len];
                        // Short buffer for surrounding chars.
                        // This is useful primarily for
                        // RopeFunctions.  We put the buffer
                        // here to avoid locking in the
                        // multithreaded case.
    // The cached path is generally assumed to be valid
    // only if the buffer is valid.
    static void _S_setbuf(_Rope_iterator_base& __x);
                                        // Set buffer contents given
                                        // path cache.
    static void _S_setcache(_Rope_iterator_base& __x);
                                        // Set buffer contents and
                                        // path cache.
    static void _S_setcache_for_incr(_Rope_iterator_base& __x);
                                        // As above, but assumes path
                                        // cache is valid for previous posn.
    _Rope_iterator_base() {}
    _Rope_iterator_base(_RopeRep* __root, size_t __pos)
      : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
    void _M_incr(size_t __n);
    void _M_decr(size_t __n);
  public:
    size_t index() const { return _M_current_pos; }
    _Rope_iterator_base(const _Rope_iterator_base& __x) {
        if (0 != __x._M_buf_ptr) {
            *this = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
    }
};

template<class _CharT, class _Alloc> class _Rope_iterator;

template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
      typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
      // The one from the base class may not be directly visible.
    _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
                   _Rope_iterator_base<_CharT,_Alloc>(
                     const_cast<_RopeRep*>(__root), __pos)
                   // Only nonconst iterators modify root ref count
    {}
  public:
    typedef _CharT reference;   // Really a value.  Returning a reference
                                // would be a mess, since it would have
                                // to be included in refcount.
    typedef const _CharT* pointer;

  public:
    _Rope_const_iterator() {};
    _Rope_const_iterator(const _Rope_const_iterator& __x) :
                                _Rope_iterator_base<_CharT,_Alloc>(__x) { }
    _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
    _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
        _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
    _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
        if (0 != __x._M_buf_ptr) {
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_buf_ptr = 0;
        }
        return(*this);
    }
    reference operator*() {
        if (0 == _M_buf_ptr) _S_setcache(*this);
        return *_M_buf_ptr;
    }
    _Rope_const_iterator& operator++() {
        __GC_CONST _CharT* __next;
        if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
            _M_buf_ptr = __next;
            ++_M_current_pos;
        } else {
            _M_incr(1);
        }
        return *this;
    }
    _Rope_const_iterator& operator+=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_incr(__n);
        } else {
            _M_decr(-__n);
        }
        return *this;
    }
    _Rope_const_iterator& operator--() {
        _M_decr(1);
        return *this;
    }
    _Rope_const_iterator& operator-=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_decr(__n);
        } else {
            _M_incr(-__n);
        }
        return *this;
    }
    _Rope_const_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
        // This makes a subsequent dereference expensive.
        // Perhaps we should instead copy the iterator
        // if it has a valid cache?
    }
    _Rope_const_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
    }
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator-
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_const_iterator<_CharT2,_Alloc2> operator+
        (ptrdiff_t __n,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __x);
    reference operator[](size_t __n) {
        return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
    }

    template<class _CharT2, class _Alloc2>
    friend bool operator==
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend bool operator<
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend ptrdiff_t operator-
        (const _Rope_const_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_const_iterator<_CharT2,_Alloc2>& __y);
};

template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
    friend class rope<_CharT,_Alloc>;
  protected:
    rope<_CharT,_Alloc>* _M_root_rope;
        // root is treated as a cached version of this,
        // and is used to detect changes to the underlying
        // rope.
        // Root is included in the reference count.
        // This is necessary so that we can detect changes reliably.
        // Unfortunately, it requires careful bookkeeping for the
        // nonGC case.
    _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
      : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
        _M_root_rope(__r)
       { _RopeRep::_S_ref(_M_root); if (!(__r -> empty()))_S_setcache(*this); }

    void _M_check();
  public:
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
    typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;

  public:
    rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
    _Rope_iterator() {
        _M_root = 0;  // Needed for reference counting.
    };
    _Rope_iterator(const _Rope_iterator& __x) :
        _Rope_iterator_base<_CharT,_Alloc>(__x) {
        _M_root_rope = __x._M_root_rope;
        _RopeRep::_S_ref(_M_root);
    }
    _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
    ~_Rope_iterator() {
        _RopeRep::_S_unref(_M_root);
    }
    _Rope_iterator& operator= (const _Rope_iterator& __x) {
        _RopeRep* __old = _M_root;

        _RopeRep::_S_ref(__x._M_root);
        if (0 != __x._M_buf_ptr) {
            _M_root_rope = __x._M_root_rope;
            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
        } else {
            _M_current_pos = __x._M_current_pos;
            _M_root = __x._M_root;
            _M_root_rope = __x._M_root_rope;
            _M_buf_ptr = 0;
        }
        _RopeRep::_S_unref(__old);
        return(*this);
    }
    reference operator*() {
        _M_check();
        if (0 == _M_buf_ptr) {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
               _M_root_rope, _M_current_pos);
        } else {
            return _Rope_char_ref_proxy<_CharT,_Alloc>(
               _M_root_rope, _M_current_pos, *_M_buf_ptr);
        }
    }
    _Rope_iterator& operator++() {
        _M_incr(1);
        return *this;
    }
    _Rope_iterator& operator+=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_incr(__n);
        } else {
            _M_decr(-__n);
        }
        return *this;
    }
    _Rope_iterator& operator--() {
        _M_decr(1);
        return *this;
    }
    _Rope_iterator& operator-=(ptrdiff_t __n) {
        if (__n >= 0) {
            _M_decr(__n);
        } else {
            _M_incr(-__n);
        }
        return *this;
    }
    _Rope_iterator operator++(int) {
        size_t __old_pos = _M_current_pos;
        _M_incr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    _Rope_iterator operator--(int) {
        size_t __old_pos = _M_current_pos;
        _M_decr(1);
        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
    }
    reference operator[](ptrdiff_t __n) {
        return _Rope_char_ref_proxy<_CharT,_Alloc>(
          _M_root_rope, _M_current_pos + __n);
    }

    template<class _CharT2, class _Alloc2>
    friend bool operator==
        (const _Rope_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend bool operator<
        (const _Rope_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend ptrdiff_t operator-
        (const _Rope_iterator<_CharT2,_Alloc2>& __x,
         const _Rope_iterator<_CharT2,_Alloc2>& __y);
    template<class _CharT2, class _Alloc2>
    friend _Rope_iterator<_CharT2,_Alloc2> operator-
        (const _Rope_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_iterator<_CharT2,_Alloc2> operator+
        (const _Rope_iterator<_CharT2,_Alloc2>& __x,
         ptrdiff_t __n);
    template<class _CharT2, class _Alloc2>
    friend _Rope_iterator<_CharT2,_Alloc2> operator+
        (ptrdiff_t __n,
         const _Rope_iterator<_CharT2,_Alloc2>& __x);
};

//  The rope base class encapsulates
//  the differences between SGI-style allocators and standard-conforming
//  allocators.

// Base class for ordinary allocators.
template <class _CharT, class _Allocator, bool _IsStatic>
class _Rope_alloc_base {
public:
  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return _M_data_allocator; }
  _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
        : _M_tree_ptr(__t), _M_data_allocator(__a) {}
  _Rope_alloc_base(const allocator_type& __a)
        : _M_data_allocator(__a) {}

protected:
  // The only data members of a rope:
    allocator_type _M_data_allocator;
    _RopeRep* _M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        _Tp* __name##_allocate(size_t __n) const \
          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
        void __name##_deallocate(_Tp *__p, size_t __n) const \
                { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};

// Specialization for allocators that have the property that we don't
//  actually have to store an allocator object.
template <class _CharT, class _Allocator>
class _Rope_alloc_base<_CharT,_Allocator,true> {
public:
  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
          allocator_type;
  allocator_type get_allocator() const { return allocator_type(); }
  _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
                : _M_tree_ptr(__t) {}
  _Rope_alloc_base(const allocator_type&) {}

protected:
  // The only data member of a rope:
    _RopeRep *_M_tree_ptr;

# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
        typedef typename \
          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
        static _Tp* __name##_allocate(size_t __n) \
          { return __name##Alloc::allocate(__n); } \
        static void __name##_deallocate(_Tp *__p, size_t __n) \
          { __name##Alloc::deallocate(__p, __n); }
  __ROPE_DEFINE_ALLOCS(_Allocator)
# undef __ROPE_DEFINE_ALLOC
};

template <class _CharT, class _Alloc>
struct _Rope_base
  : public _Rope_alloc_base<_CharT,_Alloc,
                            _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
{
  typedef _Rope_alloc_base<_CharT,_Alloc,
                            _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
          _Base;
  typedef typename _Base::allocator_type allocator_type;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
        // The one in _Base may not be visible due to template rules.
  _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
  _Rope_base(const allocator_type& __a) : _Base(__a) {}
};
1205
 
1206
 
1207
template 
1208
class rope : public _Rope_base<_CharT,_Alloc> {
1209
    public:
1210
        typedef _CharT value_type;
1211
        typedef ptrdiff_t difference_type;
1212
        typedef size_t size_type;
1213
        typedef _CharT const_reference;
1214
        typedef const _CharT* const_pointer;
1215
        typedef _Rope_iterator<_CharT,_Alloc> iterator;
1216
        typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1217
        typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1218
        typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1219
 
1220
        friend class _Rope_iterator<_CharT,_Alloc>;
1221
        friend class _Rope_const_iterator<_CharT,_Alloc>;
1222
        friend struct _Rope_RopeRep<_CharT,_Alloc>;
1223
        friend class _Rope_iterator_base<_CharT,_Alloc>;
1224
        friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1225
        friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1226
        friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1227
 
1228
    protected:
1229
        typedef _Rope_base<_CharT,_Alloc> _Base;
1230
        typedef typename _Base::allocator_type allocator_type;
1231
        using _Base::_M_tree_ptr;
1232
        typedef __GC_CONST _CharT* _Cstrptr;
1233
 
1234
        static _CharT _S_empty_c_str[1];
1235
 
1236
        static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1237
        enum { _S_copy_max = 23 };
1238
                // For strings shorter than _S_copy_max, we copy to
1239
                // concatenate.
1240
 
1241
        typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1242
        typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1243
        typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1244
        typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1245
        typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1246
 
1247
        // Retrieve a character at the indicated position.
1248
        static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1249
 
1250
#       ifndef __GC
1251
            // Obtain a pointer to the character at the indicated position.
1252
            // The pointer can be used to change the character.
1253
            // If such a pointer cannot be produced, as is frequently the
1254
            // case, 0 is returned instead.
1255
            // (Returns nonzero only if all nodes in the path have a refcount
1256
            // of 1.)
1257
            static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1258
#       endif
1259
 
1260
        static bool _S_apply_to_pieces(
1261
                                // should be template parameter
1262
                                _Rope_char_consumer<_CharT>& __c,
1263
                                const _RopeRep* __r,
1264
                                size_t __begin, size_t __end);
1265
                                // begin and end are assumed to be in range.
1266
 
1267
#       ifndef __GC
1268
          static void _S_unref(_RopeRep* __t)
1269
          {
1270
              _RopeRep::_S_unref(__t);
1271
          }
1272
          static void _S_ref(_RopeRep* __t)
1273
          {
1274
              _RopeRep::_S_ref(__t);
1275
          }
1276
#       else /* __GC */
1277
          static void _S_unref(_RopeRep*) {}
1278
          static void _S_ref(_RopeRep*) {}
1279
#       endif
1280
 
1281
 
1282
#       ifdef __GC
1283
            typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1284
#       else
1285
            typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1286
#       endif
1287
 
1288
        // _Result is counted in refcount.
1289
        static _RopeRep* _S_substring(_RopeRep* __base,
1290
                                    size_t __start, size_t __endp1);
1291
 
1292
        static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1293
                                          const _CharT* __iter, size_t __slen);
1294
                // Concatenate rope and char ptr, copying __s.
1295
                // Should really take an arbitrary iterator.
1296
                // Result is counted in refcount.
1297
        static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1298
                                          const _CharT* __iter, size_t __slen)
1299
                // As above, but one reference to __r is about to be
1300
                // destroyed.  Thus the pieces may be recycled if all
1301
                // relevent reference counts are 1.
1302
#           ifdef __GC
1303
                // We can't really do anything since refcounts are unavailable.
1304
                { return _S_concat_char_iter(__r, __iter, __slen); }
1305
#           else
1306
                ;
1307
#           endif
1308
 
1309
        static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
1310
                // General concatenation on _RopeRep.  _Result
1311
                // has refcount of 1.  Adjusts argument refcounts.
1312
 
1313
   public:
1314
        void apply_to_pieces( size_t __begin, size_t __end,
1315
                              _Rope_char_consumer<_CharT>& __c) const {
1316
            _S_apply_to_pieces(__c, _M_tree_ptr, __begin, __end);
1317
        }
1318
 
1319
 
1320
   protected:
1321
 
1322
        static size_t _S_rounded_up_size(size_t __n) {
1323
            return _RopeLeaf::_S_rounded_up_size(__n);
1324
        }
1325
 
1326
        static size_t _S_allocated_capacity(size_t __n) {
1327
            if (_S_is_basic_char_type((_CharT*)0)) {
1328
                return _S_rounded_up_size(__n) - 1;
1329
            } else {
1330
                return _S_rounded_up_size(__n);
1331
            }
1332
        }
1333
 
1334
        // Allocate and construct a RopeLeaf using the supplied allocator
1335
        // Takes ownership of s instead of copying.
1336
        static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1337
                                          size_t __size, allocator_type __a)
1338
        {
1339
            _RopeLeaf* __space = _LAllocator(__a).allocate(1);
1340
            return new(__space) _RopeLeaf(__s, __size, __a);
1341
        }
1342
 
1343
        static _RopeConcatenation* _S_new_RopeConcatenation(
1344
                        _RopeRep* __left, _RopeRep* __right,
1345
                        allocator_type __a)
1346
        {
1347
            _RopeConcatenation* __space = _CAllocator(__a).allocate(1);
1348
            return new(__space) _RopeConcatenation(__left, __right, __a);
1349
        }
1350
 
1351
        static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1352
                size_t __size, bool __d, allocator_type __a)
1353
        {
1354
            _RopeFunction* __space = _FAllocator(__a).allocate(1);
1355
            return new(__space) _RopeFunction(__f, __size, __d, __a);
1356
        }
1357
 
1358
        static _RopeSubstring* _S_new_RopeSubstring(
1359
                _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1360
                size_t __l, allocator_type __a)
1361
        {
1362
            _RopeSubstring* __space = _SAllocator(__a).allocate(1);
1363
            return new(__space) _RopeSubstring(__b, __s, __l, __a);
1364
        }
1365
 
1366
          static
1367
          _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1368
                       size_t __size, allocator_type __a)
1369
#         define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
1370
                _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
1371
        {
1372
            if (0 == __size) return 0;
1373
            _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
1374
 
1375
            uninitialized_copy_n(__s, __size, __buf);
1376
            _S_cond_store_eos(__buf[__size]);
1377
            __STL_TRY {
1378
              return _S_new_RopeLeaf(__buf, __size, __a);
1379
            }
1380
            __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, __size, __a))
1381
        }
1382
 
1383
 
1384
        // Concatenation of nonempty strings.
1385
        // Always builds a concatenation node.
1386
        // Rebalances if the result is too deep.
1387
        // Result has refcount 1.
1388
        // Does not increment left and right ref counts even though
1389
        // they are referenced.
1390
        static _RopeRep*
1391
        _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1392
 
1393
        // Concatenation helper functions
1394
        static _RopeLeaf*
1395
        _S_leaf_concat_char_iter(_RopeLeaf* __r,
1396
                                 const _CharT* __iter, size_t __slen);
1397
                // Concatenate by copying leaf.
1398
                // should take an arbitrary iterator
1399
                // result has refcount 1.
1400
#       ifndef __GC
1401
          static _RopeLeaf* _S_destr_leaf_concat_char_iter
1402
                        (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1403
          // A version that potentially clobbers __r if __r->_M_ref_count == 1.
1404
#       endif
1405
 
1406
        private:
1407
 
1408
        static size_t _S_char_ptr_len(const _CharT* __s);
1409
                        // slightly generalized strlen
1410
 
1411
        rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1412
          : _Base(__t,__a) { }
1413
 
1414
 
1415
        // Copy __r to the _CharT buffer.
1416
        // Returns __buffer + __r->_M_size.
1417
        // Assumes that buffer is uninitialized.
1418
        static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1419
 
1420
        // Again, with explicit starting position and length.
1421
        // Assumes that buffer is uninitialized.
1422
        static _CharT* _S_flatten(_RopeRep* __r,
1423
                                  size_t __start, size_t __len,
1424
                                  _CharT* __buffer);
1425
 
1426
        static const unsigned long
1427
          _S_min_len[_RopeRep::_S_max_rope_depth + 1];
1428
 
1429
        static bool _S_is_balanced(_RopeRep* __r)
1430
                { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
1431
 
1432
        static bool _S_is_almost_balanced(_RopeRep* __r)
1433
                { return (__r->_M_depth == 0 ||
1434
                          __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
1435
 
1436
        static bool _S_is_roughly_balanced(_RopeRep* __r)
1437
                { return (__r->_M_depth <= 1 ||
1438
                          __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
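        // Editorial sketch (not part of the original header): the _S_min_len
        // table declared above is, in the SGI implementation file, a
        // Fibonacci-style table, so a tree of depth __d counts as balanced
        // only when it holds roughly F(__d + 2) characters.  A minimal
        // illustration of how such a table could be filled:
        //
        //   __min_len[0] = 1;  __min_len[1] = 2;
        //   for (int __i = 2; __i <= _RopeRep::_S_max_rope_depth; ++__i)
        //       __min_len[__i] = __min_len[__i - 1] + __min_len[__i - 2];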
1439
 
1440
        // Assumes the result is not empty.
1441
        static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1442
                                                     _RopeRep* __right)
1443
        {
1444
            _RopeRep* __result = _S_concat(__left, __right);
1445
            if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1446
            return __result;
1447
        }
1448
 
1449
        // The basic rebalancing operation.  Logically copies the
1450
        // rope.  The result has refcount of 1.  The client will
1451
        // usually decrement the reference count of __r.
1452
        // The result is within height 2 of balanced by the above
1453
        // definition.
1454
        static _RopeRep* _S_balance(_RopeRep* __r);
1455
 
1456
        // Add all unbalanced subtrees to the forest of balanced trees.
1457
        // Used only by balance.
1458
        static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1459
 
1460
        // Add __r to forest, assuming __r is already balanced.
1461
        static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1462
 
1463
        // Print to stdout, exposing structure
1464
        static void _S_dump(_RopeRep* __r, int __indent = 0);
1465
 
1466
        // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1467
        static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1468
 
1469
   public:
1470
        bool empty() const { return 0 == _M_tree_ptr; }
1471
 
1472
        // Comparison member function.  This is public only for those
1473
        // clients that need a ternary comparison.  Others
1474
        // should use the comparison operators below.
1475
        int compare(const rope& __y) const {
1476
            return _S_compare(_M_tree_ptr, __y._M_tree_ptr);
1477
        }
1478
 
1479
        rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1480
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
1481
                                                 __a),__a)
1482
        { }
1483
 
1484
        rope(const _CharT* __s, size_t __len,
1485
             const allocator_type& __a = allocator_type())
1486
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
1487
        { }
1488
 
1489
        // Should perhaps be templatized with respect to the iterator type
1490
        // and use Sequence_buffer.  (It should perhaps use sequence_buffer
1491
        // even now.)
1492
        rope(const _CharT *__s, const _CharT *__e,
1493
             const allocator_type& __a = allocator_type())
1494
        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
1495
        { }
1496
 
1497
        rope(const const_iterator& __s, const const_iterator& __e,
1498
             const allocator_type& __a = allocator_type())
1499
        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1500
                             __e._M_current_pos), __a)
1501
        { }
1502
 
1503
        rope(const iterator& __s, const iterator& __e,
1504
             const allocator_type& __a = allocator_type())
1505
        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1506
                             __e._M_current_pos), __a)
1507
        { }
1508
 
1509
        rope(_CharT __c, const allocator_type& __a = allocator_type())
1510
        : _Base(__a)
1511
        {
1512
            _CharT* __buf = _Data_allocate(_S_rounded_up_size(1));
1513
 
1514
            construct(__buf, __c);
1515
            __STL_TRY {
1516
                _M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
1517
            }
1518
            __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, 1, __a))
1519
        }
1520
 
1521
        rope(size_t __n, _CharT __c,
1522
             const allocator_type& __a = allocator_type());
1523
 
1524
        rope(const allocator_type& __a = allocator_type())
1525
        : _Base(0, __a) {}
1526
 
1527
        // Construct a rope from a function that can compute its members
1528
        rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1529
             const allocator_type& __a = allocator_type())
1530
            : _Base(__a)
1531
        {
1532
            _M_tree_ptr = (0 == __len) ?
1533
               0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1534
        }
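        // Editorial sketch (not in the original header): a hypothetical
        // char_producer that fills its buffer on demand, used with the
        // constructor above.  Only the constructor's signature comes from
        // this header; the producer class and its name are invented for
        // illustration.
        //
        //   class fill_producer : public char_producer<char> {
        //       char _M_c;
        //   public:
        //       fill_producer(char __c) : _M_c(__c) {}
        //       virtual void operator()(size_t, size_t __len, char* __buf)
        //         { for (size_t __i = 0; __i != __len; ++__i) __buf[__i] = _M_c; }
        //   };
        //   crope __lazy(new fill_producer('x'), 1000000, true);  // producer deleted by the rope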
1535
 
1536
        rope(const rope& __x, const allocator_type& __a = allocator_type())
1537
        : _Base(__x._M_tree_ptr, __a)
1538
        {
1539
            _S_ref(_M_tree_ptr);
1540
        }
1541
 
1542
        ~rope()
1543
        {
1544
            _S_unref(_M_tree_ptr);
1545
        }
1546
 
1547
        rope& operator=(const rope& __x)
1548
        {
1549
            _RopeRep* __old = _M_tree_ptr;
1550
            __stl_assert(get_allocator() == __x.get_allocator());
1551
            _M_tree_ptr = __x._M_tree_ptr;
1552
            _S_ref(_M_tree_ptr);
1553
            _S_unref(__old);
1554
            return(*this);
1555
        }
1556
 
1557
        void clear()
1558
        {
1559
            _S_unref(_M_tree_ptr);
1560
            _M_tree_ptr = 0;
1561
        }
1562
 
1563
        void push_back(_CharT __x)
1564
        {
1565
            _RopeRep* __old = _M_tree_ptr;
1566
            _M_tree_ptr = _S_destr_concat_char_iter(_M_tree_ptr, &__x, 1);
1567
            _S_unref(__old);
1568
        }
1569
 
1570
        void pop_back()
1571
        {
1572
            _RopeRep* __old = _M_tree_ptr;
1573
            _M_tree_ptr =
1574
              _S_substring(_M_tree_ptr, 0, _M_tree_ptr->_M_size - 1);
1575
            _S_unref(__old);
1576
        }
1577
 
1578
        _CharT back() const
1579
        {
1580
            return _S_fetch(_M_tree_ptr, _M_tree_ptr->_M_size - 1);
1581
        }
1582
 
1583
        void push_front(_CharT __x)
1584
        {
1585
            _RopeRep* __old = _M_tree_ptr;
1586
            _RopeRep* __left =
1587
              __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1588
            __STL_TRY {
1589
              _M_tree_ptr = _S_concat(__left, _M_tree_ptr);
1590
              _S_unref(__old);
1591
              _S_unref(__left);
1592
            }
1593
            __STL_UNWIND(_S_unref(__left))
1594
        }
1595
 
1596
        void pop_front()
1597
        {
1598
            _RopeRep* __old = _M_tree_ptr;
1599
            _M_tree_ptr = _S_substring(_M_tree_ptr, 1, _M_tree_ptr->_M_size);
1600
            _S_unref(__old);
1601
        }
1602
 
1603
        _CharT front() const
1604
        {
1605
            return _S_fetch(_M_tree_ptr, 0);
1606
        }
1607
 
1608
        void balance()
1609
        {
1610
            _RopeRep* __old = _M_tree_ptr;
1611
            _M_tree_ptr = _S_balance(_M_tree_ptr);
1612
            _S_unref(__old);
1613
        }
1614
 
1615
        void copy(_CharT* __buffer) const {
1616
            destroy(__buffer, __buffer + size());
1617
            _S_flatten(_M_tree_ptr, __buffer);
1618
        }
1619
 
1620
        // This is the copy function from the standard, but
1621
        // with the arguments reordered to make it consistent with the
1622
        // rest of the interface.
1623
        // Note that this is guaranteed not to compile if the draft standard
1624
        // order is assumed.
1625
        size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1626
        {
1627
            size_t __size = size();
1628
            size_t __len = (__pos + __n > __size? __size - __pos : __n);
1629
 
1630
            destroy(__buffer, __buffer + __len);
1631
            _S_flatten(_M_tree_ptr, __pos, __len, __buffer);
1632
            return __len;
1633
        }
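        // Editorial sketch (not in the original header) of the reordered
        // copy() above; names and values are illustrative only:
        //
        //   crope __r("hello, world");
        //   char __buf[5];
        //   size_t __n = __r.copy(7, 5, __buf);   // __buf holds "world", __n == 5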
1634
 
1635
        // Print to stdout, exposing structure.  May be useful for
1636
        // performance debugging.
1637
        void dump() {
1638
            _S_dump(_M_tree_ptr);
1639
        }
1640
 
1641
        // Convert to a 0-terminated string in newly allocated memory.
1642
        // Embedded 0s in the input do not terminate the copy.
1643
        const _CharT* c_str() const;
1644
 
1645
        // As above, but also use the flattened representation as
1646
        // the new rope representation.
1647
        const _CharT* replace_with_c_str();
1648
 
1649
        // Reclaim memory for the c_str generated flattened string.
1650
        // Intentionally undocumented, since it's hard to say when this
1651
        // is safe for multiple threads.
1652
        void delete_c_str () {
1653
            if (0 == _M_tree_ptr) return;
1654
            if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag &&
1655
                ((_RopeLeaf*)_M_tree_ptr)->_M_data ==
1656
                      _M_tree_ptr->_M_c_string) {
1657
                // Representation shared
1658
                return;
1659
            }
1660
#           ifndef __GC
1661
              _M_tree_ptr->_M_free_c_string();
1662
#           endif
1663
            _M_tree_ptr->_M_c_string = 0;
1664
        }
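        // Editorial sketch (not in the original header): c_str() flattens
        // into freshly allocated storage, replace_with_c_str() additionally
        // installs that flat buffer as the rope's representation, and
        // delete_c_str() releases the cached buffer when it is not shared.
        //
        //   crope __r("abc");
        //   __r.append("def");
        //   const char* __s = __r.c_str();   // "abcdef"
        //   __r.delete_c_str();              // __s may be invalidated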
1665
 
1666
        _CharT operator[] (size_type __pos) const {
1667
            return _S_fetch(_M_tree_ptr, __pos);
1668
        }
1669
 
1670
        _CharT at(size_type __pos) const {
1671
           // if (__pos >= size()) throw out_of_range;  // XXX
1672
           return (*this)[__pos];
1673
        }
1674
 
1675
        const_iterator begin() const {
1676
            return(const_iterator(_M_tree_ptr, 0));
1677
        }
1678
 
1679
        // An easy way to get a const iterator from a non-const container.
1680
        const_iterator const_begin() const {
1681
            return(const_iterator(_M_tree_ptr, 0));
1682
        }
1683
 
1684
        const_iterator end() const {
1685
            return(const_iterator(_M_tree_ptr, size()));
1686
        }
1687
 
1688
        const_iterator const_end() const {
1689
            return(const_iterator(_M_tree_ptr, size()));
1690
        }
1691
 
1692
        size_type size() const {
1693
            return(0 == _M_tree_ptr? 0 : _M_tree_ptr->_M_size);
1694
        }
1695
 
1696
        size_type length() const {
1697
            return size();
1698
        }
1699
 
1700
        size_type max_size() const {
1701
            return _S_min_len[_RopeRep::_S_max_rope_depth-1] - 1;
1702
            //  Guarantees that the result can be sufficiently
1703
            //  balanced.  Longer ropes will probably still work,
1704
            //  but it's harder to make guarantees.
1705
        }
1706
 
1707
        typedef reverse_iterator<const_iterator> const_reverse_iterator;
1708
 
1709
        const_reverse_iterator rbegin() const {
1710
            return const_reverse_iterator(end());
1711
        }
1712
 
1713
        const_reverse_iterator const_rbegin() const {
1714
            return const_reverse_iterator(end());
1715
        }
1716
 
1717
        const_reverse_iterator rend() const {
1718
            return const_reverse_iterator(begin());
1719
        }
1720
 
1721
        const_reverse_iterator const_rend() const {
1722
            return const_reverse_iterator(begin());
1723
        }
1724
 
1725
        template <class _CharT2, class _Alloc2>
1726
        friend rope<_CharT2,_Alloc2>
1727
        operator+ (const rope<_CharT2,_Alloc2>& __left,
1728
                   const rope<_CharT2,_Alloc2>& __right);
1729
 
1730
        template <class _CharT2, class _Alloc2>
1731
        friend rope<_CharT2,_Alloc2>
1732
        operator+ (const rope<_CharT2,_Alloc2>& __left,
1733
                   const _CharT2* __right);
1734
 
1735
        template <class _CharT2, class _Alloc2>
1736
        friend rope<_CharT2,_Alloc2>
1737
        operator+ (const rope<_CharT2,_Alloc2>& __left, _CharT2 __right);
1738
        // The symmetric cases are intentionally omitted, since they're presumed
1739
        // to be less common, and we don't handle them as well.
1740
 
1741
        // The following should really be templatized.
1742
        // The first argument should be an input iterator or
1743
        // forward iterator with value_type _CharT.
1744
        rope& append(const _CharT* __iter, size_t __n) {
1745
            _RopeRep* __result =
1746
              _S_destr_concat_char_iter(_M_tree_ptr, __iter, __n);
1747
            _S_unref(_M_tree_ptr);
1748
            _M_tree_ptr = __result;
1749
            return *this;
1750
        }
1751
 
1752
        rope& append(const _CharT* __c_string) {
1753
            size_t __len = _S_char_ptr_len(__c_string);
1754
            append(__c_string, __len);
1755
            return(*this);
1756
        }
1757
 
1758
        rope& append(const _CharT* __s, const _CharT* __e) {
1759
            _RopeRep* __result =
1760
                _S_destr_concat_char_iter(_M_tree_ptr, __s, __e - __s);
1761
            _S_unref(_M_tree_ptr);
1762
            _M_tree_ptr = __result;
1763
            return *this;
1764
        }
1765
 
1766
        rope& append(const_iterator __s, const_iterator __e) {
1767
            __stl_assert(__s._M_root == __e._M_root);
1768
            __stl_assert(get_allocator() == __s._M_root->get_allocator());
1769
            _Self_destruct_ptr __appendee(_S_substring(
1770
              __s._M_root, __s._M_current_pos, __e._M_current_pos));
1771
            _RopeRep* __result =
1772
              _S_concat(_M_tree_ptr, (_RopeRep*)__appendee);
1773
            _S_unref(_M_tree_ptr);
1774
            _M_tree_ptr = __result;
1775
            return *this;
1776
        }
1777
 
1778
        rope& append(_CharT __c) {
1779
            _RopeRep* __result =
1780
              _S_destr_concat_char_iter(_M_tree_ptr, &__c, 1);
1781
            _S_unref(_M_tree_ptr);
1782
            _M_tree_ptr = __result;
1783
            return *this;
1784
        }
1785
 
1786
        rope& append() { return append(_CharT()); }  // XXX why?
1787
 
1788
        rope& append(const rope& __y) {
1789
            __stl_assert(__y.get_allocator() == get_allocator());
1790
            _RopeRep* __result = _S_concat(_M_tree_ptr, __y._M_tree_ptr);
1791
            _S_unref(_M_tree_ptr);
1792
            _M_tree_ptr = __result;
1793
            return *this;
1794
        }
1795
 
1796
        rope& append(size_t __n, _CharT __c) {
1797
            rope<_CharT,_Alloc> __last(__n, __c);
1798
            return append(__last);
1799
        }
1800
 
1801
        void swap(rope& __b) {
1802
            __stl_assert(get_allocator() == __b.get_allocator());
1803
            _RopeRep* __tmp = _M_tree_ptr;
1804
            _M_tree_ptr = __b._M_tree_ptr;
1805
            __b._M_tree_ptr = __tmp;
1806
        }
1807
 
1808
 
1809
    protected:
1810
        // Result is included in refcount.
1811
        static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1812
                                  size_t __pos2, _RopeRep* __r) {
1813
            if (0 == __old) { _S_ref(__r); return __r; }
1814
            _Self_destruct_ptr __left(
1815
              _S_substring(__old, 0, __pos1));
1816
            _Self_destruct_ptr __right(
1817
              _S_substring(__old, __pos2, __old->_M_size));
1818
            _RopeRep* __result;
1819
 
1820
            __stl_assert(__old->get_allocator() == __r->get_allocator());
1821
            if (0 == __r) {
1822
                __result = _S_concat(__left, __right);
1823
            } else {
1824
                _Self_destruct_ptr __left_result(_S_concat(__left, __r));
1825
                __result = _S_concat(__left_result, __right);
1826
            }
1827
            return __result;
1828
        }
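            // Editorial note (not in the original header): the helper above
            // rebuilds the rope as
            //   substring(__old, 0, __pos1) + __r + substring(__old, __pos2, __old->_M_size)
            // so, for example, replacing positions [2, 4) of "abcdef" with
            // "XY" yields "abXYef".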
1829
 
1830
    public:
1831
        void insert(size_t __p, const rope& __r) {
1832
            _RopeRep* __result =
1833
              replace(_M_tree_ptr, __p, __p, __r._M_tree_ptr);
1834
            __stl_assert(get_allocator() == __r.get_allocator());
1835
            _S_unref(_M_tree_ptr);
1836
            _M_tree_ptr = __result;
1837
        }
1838
 
1839
        void insert(size_t __p, size_t __n, _CharT __c) {
1840
            rope<_CharT,_Alloc> __r(__n,__c);
1841
            insert(__p, __r);
1842
        }
1843
 
1844
        void insert(size_t __p, const _CharT* __i, size_t __n) {
1845
            _Self_destruct_ptr __left(_S_substring(_M_tree_ptr, 0, __p));
1846
            _Self_destruct_ptr __right(_S_substring(_M_tree_ptr, __p, size()));
1847
            _Self_destruct_ptr __left_result(
1848
              _S_concat_char_iter(__left, __i, __n));
1849
                // _S_destr_concat_char_iter should be safe here.
1850
                // But as it stands it's probably not a win, since __left
1851
                // is likely to have additional references.
1852
            _RopeRep* __result = _S_concat(__left_result, __right);
1853
            _S_unref(_M_tree_ptr);
1854
            _M_tree_ptr = __result;
1855
        }
1856
 
1857
        void insert(size_t __p, const _CharT* __c_string) {
1858
            insert(__p, __c_string, _S_char_ptr_len(__c_string));
1859
        }
1860
 
1861
        void insert(size_t __p, _CharT __c) {
1862
            insert(__p, &__c, 1);
1863
        }
1864
 
1865
        void insert(size_t __p) {
1866
            _CharT __c = _CharT();
1867
            insert(__p, &__c, 1);
1868
        }
1869
 
1870
        void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
1871
            rope __r(__i, __j);
1872
            insert(__p, __r);
1873
        }
1874
 
1875
        void insert(size_t __p, const const_iterator& __i,
1876
                              const const_iterator& __j) {
1877
            rope __r(__i, __j);
1878
            insert(__p, __r);
1879
        }
1880
 
1881
        void insert(size_t __p, const iterator& __i,
1882
                              const iterator& __j) {
1883
            rope __r(__i, __j);
1884
            insert(__p, __r);
1885
        }
1886
 
1887
        // (position, length) versions of replace operations:
1888
 
1889
        void replace(size_t __p, size_t __n, const rope& __r) {
1890
            _RopeRep* __result =
1891
              replace(_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
1892
            _S_unref(_M_tree_ptr);
1893
            _M_tree_ptr = __result;
1894
        }
1895
 
1896
        void replace(size_t __p, size_t __n,
1897
                     const _CharT* __i, size_t __i_len) {
1898
            rope __r(__i, __i_len);
1899
            replace(__p, __n, __r);
1900
        }
1901
 
1902
        void replace(size_t __p, size_t __n, _CharT __c) {
1903
            rope __r(__c);
1904
            replace(__p, __n, __r);
1905
        }
1906
 
1907
        void replace(size_t __p, size_t __n, const _CharT* __c_string) {
1908
            rope __r(__c_string);
1909
            replace(__p, __n, __r);
1910
        }
1911
 
1912
        void replace(size_t __p, size_t __n,
1913
                     const _CharT* __i, const _CharT* __j) {
1914
            rope __r(__i, __j);
1915
            replace(__p, __n, __r);
1916
        }
1917
 
1918
        void replace(size_t __p, size_t __n,
1919
                     const const_iterator& __i, const const_iterator& __j) {
1920
            rope __r(__i, __j);
1921
            replace(__p, __n, __r);
1922
        }
1923
 
1924
        void replace(size_t __p, size_t __n,
1925
                     const iterator& __i, const iterator& __j) {
1926
            rope __r(__i, __j);
1927
            replace(__p, __n, __r);
1928
        }
1929
 
1930
        // Single character variants:
1931
        void replace(size_t __p, _CharT __c) {
1932
            iterator __i(this, __p);
1933
            *__i = __c;
1934
        }
1935
 
1936
        void replace(size_t __p, const rope& __r) {
1937
            replace(__p, 1, __r);
1938
        }
1939
 
1940
        void replace(size_t __p, const _CharT* __i, size_t __i_len) {
1941
            replace(__p, 1, __i, __i_len);
1942
        }
1943
 
1944
        void replace(size_t __p, const _CharT* __c_string) {
1945
            replace(__p, 1, __c_string);
1946
        }
1947
 
1948
        void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
1949
            replace(__p, 1, __i, __j);
1950
        }
1951
 
1952
        void replace(size_t __p, const const_iterator& __i,
1953
                               const const_iterator& __j) {
1954
            replace(__p, 1, __i, __j);
1955
        }
1956
 
1957
        void replace(size_t __p, const iterator& __i,
1958
                               const iterator& __j) {
1959
            replace(__p, 1, __i, __j);
1960
        }
1961
 
1962
        // Erase, (position, size) variant.
1963
        void erase(size_t __p, size_t __n) {
1964
            _RopeRep* __result = replace(_M_tree_ptr, __p, __p + __n, 0);
1965
            _S_unref(_M_tree_ptr);
1966
            _M_tree_ptr = __result;
1967
        }
1968
 
1969
        // Erase, single character
1970
        void erase(size_t __p) {
1971
            erase(__p, 1);
1972
        }
1973
 
1974
        // Insert, iterator variants.
1975
        iterator insert(const iterator& __p, const rope& __r)
1976
                { insert(__p.index(), __r); return __p; }
1977
        iterator insert(const iterator& __p, size_t __n, _CharT __c)
1978
                { insert(__p.index(), __n, __c); return __p; }
1979
        iterator insert(const iterator& __p, _CharT __c)
1980
                { insert(__p.index(), __c); return __p; }
1981
        iterator insert(const iterator& __p )
1982
                { insert(__p.index()); return __p; }
1983
        iterator insert(const iterator& __p, const _CharT* c_string)
1984
                { insert(__p.index(), c_string); return __p; }
1985
        iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
1986
                { insert(__p.index(), __i, __n); return __p; }
1987
        iterator insert(const iterator& __p, const _CharT* __i,
1988
                        const _CharT* __j)
1989
                { insert(__p.index(), __i, __j);  return __p; }
1990
        iterator insert(const iterator& __p,
1991
                        const const_iterator& __i, const const_iterator& __j)
1992
                { insert(__p.index(), __i, __j); return __p; }
1993
        iterator insert(const iterator& __p,
1994
                        const iterator& __i, const iterator& __j)
1995
                { insert(__p.index(), __i, __j); return __p; }
1996
 
1997
        // Replace, range variants.
1998
        void replace(const iterator& __p, const iterator& __q,
1999
                     const rope& __r)
2000
                { replace(__p.index(), __q.index() - __p.index(), __r); }
2001
        void replace(const iterator& __p, const iterator& __q, _CharT __c)
2002
                { replace(__p.index(), __q.index() - __p.index(), __c); }
2003
        void replace(const iterator& __p, const iterator& __q,
2004
                     const _CharT* __c_string)
2005
                { replace(__p.index(), __q.index() - __p.index(), __c_string); }
2006
        void replace(const iterator& __p, const iterator& __q,
2007
                     const _CharT* __i, size_t __n)
2008
                { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
2009
        void replace(const iterator& __p, const iterator& __q,
2010
                     const _CharT* __i, const _CharT* __j)
2011
                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2012
        void replace(const iterator& __p, const iterator& __q,
2013
                     const const_iterator& __i, const const_iterator& __j)
2014
                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2015
        void replace(const iterator& __p, const iterator& __q,
2016
                     const iterator& __i, const iterator& __j)
2017
                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2018
 
2019
        // Replace, iterator variants.
2020
        void replace(const iterator& __p, const rope& __r)
2021
                { replace(__p.index(), __r); }
2022
        void replace(const iterator& __p, _CharT __c)
2023
                { replace(__p.index(), __c); }
2024
        void replace(const iterator& __p, const _CharT* __c_string)
2025
                { replace(__p.index(), __c_string); }
2026
        void replace(const iterator& __p, const _CharT* __i, size_t __n)
2027
                { replace(__p.index(), __i, __n); }
2028
        void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
2029
                { replace(__p.index(), __i, __j); }
2030
        void replace(const iterator& __p, const_iterator __i,
2031
                     const_iterator __j)
2032
                { replace(__p.index(), __i, __j); }
2033
        void replace(const iterator& __p, iterator __i, iterator __j)
2034
                { replace(__p.index(), __i, __j); }
2035
 
2036
        // Iterator and range variants of erase
2037
        iterator erase(const iterator& __p, const iterator& __q) {
2038
            size_t __p_index = __p.index();
2039
            erase(__p_index, __q.index() - __p_index);
2040
            return iterator(this, __p_index);
2041
        }
2042
        iterator erase(const iterator& __p) {
2043
            size_t __p_index = __p.index();
2044
            erase(__p_index, 1);
2045
            return iterator(this, __p_index);
2046
        }
2047
 
2048
        rope substr(size_t __start, size_t __len = 1) const {
2049
            return rope<_CharT,_Alloc>(
2050
                        _S_substring(_M_tree_ptr, __start, __start + __len));
2051
        }
2052
 
2053
        rope substr(iterator __start, iterator __end) const {
2054
            return rope<_CharT,_Alloc>(
2055
                _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2056
        }
2057
 
2058
        rope substr(iterator __start) const {
2059
            size_t __pos = __start.index();
2060
            return rope<_CharT,_Alloc>(
2061
                        _S_substring(_M_tree_ptr, __pos, __pos + 1));
2062
        }
2063
 
2064
        rope substr(const_iterator __start, const_iterator __end) const {
2065
            // This might eventually take advantage of the cache in the
2066
            // iterator.
2067
            return rope<_CharT,_Alloc>(
2068
              _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2069
        }
2070
 
2071
        rope<_CharT,_Alloc> substr(const_iterator __start) {
2072
            size_t __pos = __start.index();
2073
            return rope<_CharT,_Alloc>(
2074
              _S_substring(_M_tree_ptr, __pos, __pos + 1));
2075
        }
2076
 
2077
        static const size_type npos;
2078
 
2079
        size_type find(_CharT __c, size_type __pos = 0) const;
2080
        size_type find(const _CharT* __s, size_type __pos = 0) const {
2081
            size_type __result_pos;
2082
            const_iterator __result = search(const_begin() + __pos, const_end(),
2083
                                           __s, __s + _S_char_ptr_len(__s));
2084
            __result_pos = __result.index();
2085
#           ifndef __STL_OLD_ROPE_SEMANTICS
2086
                if (__result_pos == size()) __result_pos = npos;
2087
#           endif
2088
            return __result_pos;
2089
        }
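        // Editorial sketch (not in the original header) of substr()/find();
        // the literal values are illustrative only:
        //
        //   crope __r("abcdef");
        //   crope __part = __r.substr(2, 3);        // "cde"
        //   crope::size_type __i = __r.find('d');   // 3
        //   if (__r.find("zz") == crope::npos) { /* not present */ }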
2090
 
2091
        iterator mutable_begin() {
2092
            return(iterator(this, 0));
2093
        }
2094
 
2095
        iterator mutable_end() {
2096
            return(iterator(this, size()));
2097
        }
2098
 
2099
        typedef reverse_iterator<iterator> reverse_iterator;
2100
 
2101
        reverse_iterator mutable_rbegin() {
2102
            return reverse_iterator(mutable_end());
2103
        }
2104
 
2105
        reverse_iterator mutable_rend() {
2106
            return reverse_iterator(mutable_begin());
2107
        }
2108
 
2109
        reference mutable_reference_at(size_type __pos) {
2110
            return reference(this, __pos);
2111
        }
2112
 
2113
#       ifdef __STD_STUFF
2114
            reference operator[] (size_type __pos) {
2115
                return _char_ref_proxy(this, __pos);
2116
            }
2117
 
2118
            reference at(size_type __pos) {
2119
                // if (__pos >= size()) throw out_of_range;  // XXX
2120
                return (*this)[__pos];
2121
            }
2122
 
2123
            void resize(size_type __n, _CharT __c) {}
2124
            void resize(size_type __n) {}
2125
            void reserve(size_type __res_arg = 0) {}
2126
            size_type capacity() const {
2127
                return max_size();
2128
            }
2129
 
2130
          // Stuff below this line is dangerous because it's error prone.
2131
          // I would really like to get rid of it.
2132
            // copy function with funny arg ordering.
2133
              size_type copy(_CharT* __buffer, size_type __n,
2134
                             size_type __pos = 0) const {
2135
                return copy(__pos, __n, __buffer);
2136
              }
2137
 
2138
            iterator end() { return mutable_end(); }
2139
 
2140
            iterator begin() { return mutable_begin(); }
2141
 
2142
            reverse_iterator rend() { return mutable_rend(); }
2143
 
2144
            reverse_iterator rbegin() { return mutable_rbegin(); }
2145
 
2146
#       else
2147
 
2148
            const_iterator end() { return const_end(); }
2149
 
2150
            const_iterator begin() { return const_begin(); }
2151
 
2152
            const_reverse_iterator rend() { return const_rend(); }
2153
 
2154
            const_reverse_iterator rbegin() { return const_rbegin(); }
2155
 
2156
#       endif
2157
 
2158
};
2159
 
2160
template <class _CharT, class _Alloc>
2161
const rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
2162
                        (size_type)(-1);
2163
 
2164
template <class _CharT, class _Alloc>
2165
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2166
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2167
  return (__x._M_current_pos == __y._M_current_pos &&
2168
          __x._M_root == __y._M_root);
2169
}
2170
 
2171
template <class _CharT, class _Alloc>
2172
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2173
                       const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2174
  return (__x._M_current_pos < __y._M_current_pos);
2175
}
2176
 
2177
template <class _CharT, class _Alloc>
2178
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2179
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2180
  return !(__x == __y);
2181
}
2182
 
2183
template <class _CharT, class _Alloc>
2184
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2185
                       const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2186
  return __y < __x;
2187
}
2188
 
2189
template <class _CharT, class _Alloc>
2190
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2191
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2192
  return !(__y < __x);
2193
}
2194
 
2195
template <class _CharT, class _Alloc>
2196
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2197
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2198
  return !(__x < __y);
2199
}
2200
 
2201
template <class _CharT, class _Alloc>
2202
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2203
                           const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2204
  return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2205
}
2206
 
2207
template <class _CharT, class _Alloc>
2208
inline _Rope_const_iterator<_CharT,_Alloc>
2209
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2210
  return _Rope_const_iterator<_CharT,_Alloc>(
2211
            __x._M_root, __x._M_current_pos - __n);
2212
}
2213
 
2214
template <class _CharT, class _Alloc>
2215
inline _Rope_const_iterator<_CharT,_Alloc>
2216
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2217
  return _Rope_const_iterator<_CharT,_Alloc>(
2218
           __x._M_root, __x._M_current_pos + __n);
2219
}
2220
 
2221
template <class _CharT, class _Alloc>
2222
inline _Rope_const_iterator<_CharT,_Alloc>
2223
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
2224
  return _Rope_const_iterator<_CharT,_Alloc>(
2225
           __x._M_root, __x._M_current_pos + __n);
2226
}
2227
 
2228
template <class _CharT, class _Alloc>
2229
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2230
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
2231
  return (__x._M_current_pos == __y._M_current_pos &&
2232
          __x._M_root_rope == __y._M_root_rope);
2233
}
2234
 
2235
template <class _CharT, class _Alloc>
2236
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2237
                       const _Rope_iterator<_CharT,_Alloc>& __y) {
2238
  return (__x._M_current_pos < __y._M_current_pos);
2239
}
2240
 
2241
template <class _CharT, class _Alloc>
2242
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
2243
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
2244
  return !(__x == __y);
2245
}
2246
 
2247
template <class _CharT, class _Alloc>
2248
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
2249
                       const _Rope_iterator<_CharT,_Alloc>& __y) {
2250
  return __y < __x;
2251
}
2252
 
2253
template <class _CharT, class _Alloc>
2254
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
2255
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
2256
  return !(__y < __x);
2257
}
2258
 
2259
template <class _CharT, class _Alloc>
2260
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
2261
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
2262
  return !(__x < __y);
2263
}
2264
 
2265
template <class _CharT, class _Alloc>
2266
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2267
                           const _Rope_iterator<_CharT,_Alloc>& __y) {
2268
  return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2269
}
2270
 
2271
template <class _CharT, class _Alloc>
2272
inline _Rope_iterator<_CharT,_Alloc>
2273
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2274
          ptrdiff_t __n) {
2275
  return _Rope_iterator<_CharT,_Alloc>(
2276
    __x._M_root_rope, __x._M_current_pos - __n);
2277
}
2278
 
2279
template <class _CharT, class _Alloc>
2280
inline _Rope_iterator<_CharT,_Alloc>
2281
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2282
          ptrdiff_t __n) {
2283
  return _Rope_iterator<_CharT,_Alloc>(
2284
    __x._M_root_rope, __x._M_current_pos + __n);
2285
}
2286
 
2287
template <class _CharT, class _Alloc>
2288
inline _Rope_iterator<_CharT,_Alloc>
2289
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2290
  return _Rope_iterator<_CharT,_Alloc>(
2291
    __x._M_root_rope, __x._M_current_pos + __n);
2292
}
2293
 
2294
template <class _CharT, class _Alloc>
2295
inline
2296
rope<_CharT,_Alloc>
2297
operator+ (const rope<_CharT,_Alloc>& __left,
2298
           const rope<_CharT,_Alloc>& __right)
2299
{
2300
    __stl_assert(__left.get_allocator() == __right.get_allocator());
2301
    return rope<_CharT,_Alloc>(
2302
      rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
2303
    // Inlining this should make it possible to keep __left and
2304
    // __right in registers.
2305
}
2306
 
2307
template <class _CharT, class _Alloc>
2308
inline
2309
rope<_CharT,_Alloc>&
2310
operator+= (rope<_CharT,_Alloc>& __left,
2311
      const rope<_CharT,_Alloc>& __right)
2312
{
2313
    __left.append(__right);
2314
    return __left;
2315
}
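// Editorial sketch (not in the original header) of the concatenation
// operators defined here: operator+ builds a concatenation node, so neither
// operand is copied character by character.
//
//   crope __a("foo"), __b("bar");
//   crope __c = __a + __b;   // "foobar"
//   __c += __b;              // "foobarbar"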
2316
 
2317
template <class _CharT, class _Alloc>
2318
inline
2319
rope<_CharT,_Alloc>
2320
operator+ (const rope<_CharT,_Alloc>& __left,
2321
           const _CharT* __right) {
2322
    size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2323
    return rope<_CharT,_Alloc>(
2324
      rope<_CharT,_Alloc>::_S_concat_char_iter(
2325
        __left._M_tree_ptr, __right, __rlen));
2326
}
2327
 
2328
template <class _CharT, class _Alloc>
2329
inline
2330
rope<_CharT,_Alloc>&
2331
operator+= (rope<_CharT,_Alloc>& __left,
2332
            const _CharT* __right) {
2333
    __left.append(__right);
2334
    return __left;
2335
}
2336
 
2337
template <class _CharT, class _Alloc>
2338
inline
2339
rope<_CharT,_Alloc>
2340
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
2341
    return rope<_CharT,_Alloc>(
2342
      rope<_CharT,_Alloc>::_S_concat_char_iter(
2343
        __left._M_tree_ptr, &__right, 1));
2344
}
2345
 
2346
template <class _CharT, class _Alloc>
2347
inline
2348
rope<_CharT,_Alloc>&
2349
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
2350
    __left.append(__right);
2351
    return __left;
2352
}
2353
 
2354
template <class _CharT, class _Alloc>
2355
bool
2356
operator< (const rope<_CharT,_Alloc>& __left,
2357
           const rope<_CharT,_Alloc>& __right) {
2358
    return __left.compare(__right) < 0;
2359
}
2360
 
2361
template <class _CharT, class _Alloc>
2362
bool
2363
operator== (const rope<_CharT,_Alloc>& __left,
2364
            const rope<_CharT,_Alloc>& __right) {
2365
    return __left.compare(__right) == 0;
2366
}
2367
 
2368
template <class _CharT, class _Alloc>
2369
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2370
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2371
        return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
2372
}
2373
 
2374
template <class _CharT, class _Alloc>
2375
inline bool
2376
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2377
  return !(__x == __y);
2378
}
2379
 
2380
template <class _CharT, class _Alloc>
2381
inline bool
2382
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2383
  return __y < __x;
2384
}
2385
 
2386
template <class _CharT, class _Alloc>
2387
inline bool
2388
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2389
  return !(__y < __x);
2390
}
2391
 
2392
template <class _CharT, class _Alloc>
2393
inline bool
2394
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
2395
  return !(__x < __y);
2396
}
2397
 
2398
template <class _CharT, class _Alloc>
2399
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2400
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2401
  return !(__x == __y);
2402
}
2403
 
2404
template<class _CharT, class _Traits, class _Alloc>
2405
basic_ostream<_CharT, _Traits>& operator<<
2406
                                        (basic_ostream<_CharT, _Traits>& __o,
2407
                                         const rope<_CharT, _Alloc>& __r);
2408
 
2409
typedef rope<char> crope;
2410
typedef rope<wchar_t> wrope;
2411
 
2412
inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2413
{
2414
    return __c.mutable_reference_at(__i);
2415
}
2416
 
2417
inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2418
{
2419
    return __c.mutable_reference_at(__i);
2420
}
2421
 
2422
template <class _CharT, class _Alloc>
2423
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
2424
  __x.swap(__y);
2425
}
2426
 
2427
// Hash functions should probably be revisited later:
2428
template<> struct hash<crope>
2429
{
2430
  size_t operator()(const crope& __str) const
2431
  {
2432
    size_t __size = __str.size();
2433
 
2434
    if (0 == __size) return 0;
2435
    return 13*__str[0] + 5*__str[__size - 1] + __size;
2436
  }
2437
};
2438
 
2439
 
2440
template<> struct hash<wrope>
2441
{
2442
  size_t operator()(const wrope& __str) const
2443
  {
2444
    size_t __size = __str.size();
2445
 
2446
    if (0 == __size) return 0;
2447
    return 13*__str[0] + 5*__str[__size - 1] + __size;
2448
  }
2449
};
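// Editorial note (not in the original header): with the formula above,
// hash<crope>()(crope("abc")) evaluates to 13*'a' + 5*'c' + 3.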
2450
 
2451
} // namespace std
2452
 
2453
# include 
2454
 
2455
# endif /* __SGI_STL_INTERNAL_ROPE_H */
2456
 
2457
// Local Variables:
2458
// mode:C++
2459
// End: