/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

#ifndef __SGI_STL_INTERNAL_ALLOC_H
#define __SGI_STL_INTERNAL_ALLOC_H

// This implements some standard node allocators.  These are
// NOT the same as the allocators in the C++ draft standard or in
// the original STL.  They do not encapsulate different pointer
// types; indeed we assume that there is only one pointer type.
// The allocation primitives are intended to allocate individual objects,
// not larger arenas as with the original STL allocators.

#include <bits/functexcept.h>   // for __throw_bad_alloc
#include <bits/std_cstddef.h>
#include <bits/std_cstdlib.h>
#include <bits/std_cstring.h>
#include <bits/std_cassert.h>
#ifndef __RESTRICT
#  define __RESTRICT
#endif

#ifdef __STL_THREADS
# include <bits/stl_threads.h>
# define __NODE_ALLOCATOR_THREADS true
# ifdef __STL_SGI_THREADS
  // We test whether threads are in use before locking.
  // Perhaps this should be moved into stl_threads.h, but that
  // probably makes it harder to avoid the procedure call when
  // it isn't needed.
    extern "C" {
      extern int __us_rsthread_malloc;
    }
    // The above is copied from malloc.h.  Including <malloc.h>
    // would be cleaner but fails with certain levels of standard
    // conformance.
#   define __NODE_ALLOCATOR_LOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_release_lock(); }
# else /* !__STL_SGI_THREADS */
#   define __NODE_ALLOCATOR_LOCK \
        { if (threads) _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK \
        { if (threads) _S_node_allocator_lock._M_release_lock(); }
# endif
#else
//  Thread-unsafe
#   define __NODE_ALLOCATOR_LOCK
#   define __NODE_ALLOCATOR_UNLOCK
#   define __NODE_ALLOCATOR_THREADS false
#endif

namespace std
{

// Malloc-based allocator.  Typically slower than default alloc below.
// Typically thread-safe and more storage efficient.
template <int __inst>
class __malloc_alloc_template {

private:

  static void* _S_oom_malloc(size_t);
  static void* _S_oom_realloc(void*, size_t);
  static void (* __malloc_alloc_oom_handler)();

public:

  static void* allocate(size_t __n)
  {
    void* __result = malloc(__n);
    if (0 == __result) __result = _S_oom_malloc(__n);
    return __result;
  }

  static void deallocate(void* __p, size_t /* __n */)
  {
    free(__p);
  }

  static void* reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
  {
    void* __result = realloc(__p, __new_sz);
    if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
    return __result;
  }

  static void (* __set_malloc_handler(void (*__f)()))()
  {
    void (* __old)() = __malloc_alloc_oom_handler;
    __malloc_alloc_oom_handler = __f;
    return(__old);
  }

};
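
// A usage sketch (illustrative; __reclaim_cache is a hypothetical handler,
// not part of this header).  Any installed handler must free some memory
// or throw, since _S_oom_malloc below retries in an infinite loop:
//
//     void __reclaim_cache();   // hypothetical: releases cached memory
//     typedef __malloc_alloc_template<0> _My_alloc;
//     _My_alloc::__set_malloc_handler(&__reclaim_cache);
//     void* __p = _My_alloc::allocate(1024);
//     _My_alloc::deallocate(__p, 1024);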

// malloc_alloc out-of-memory handling

template <int __inst>
void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;

template <int __inst>
void*
__malloc_alloc_template<__inst>::_S_oom_malloc(size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { std::__throw_bad_alloc(); }
        (*__my_malloc_handler)();
        __result = malloc(__n);
        if (__result) return(__result);
    }
}

template <int __inst>
void* __malloc_alloc_template<__inst>::_S_oom_realloc(void* __p, size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { std::__throw_bad_alloc(); }
        (*__my_malloc_handler)();
        __result = realloc(__p, __n);
        if (__result) return(__result);
    }
}

typedef __malloc_alloc_template<0> malloc_alloc;

template<class _Tp, class _Alloc>
class simple_alloc {

public:
    static _Tp* allocate(size_t __n)
      { return 0 == __n ? 0 : (_Tp*) _Alloc::allocate(__n * sizeof (_Tp)); }
    static _Tp* allocate(void)
      { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
    static void deallocate(_Tp* __p, size_t __n)
      { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
    static void deallocate(_Tp* __p)
      { _Alloc::deallocate(__p, sizeof (_Tp)); }
};
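
// A usage sketch (illustrative only): containers typically instantiate
// simple_alloc with their element or node type:
//
//     typedef simple_alloc<int, alloc> _Int_alloc;
//     int* __buf = _Int_alloc::allocate(10);   // 10 * sizeof(int) bytes
//     _Int_alloc::deallocate(__buf, 10);       // size must match the request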

// Allocator adaptor to check size arguments for debugging.
// Reports errors using assert.  Checking can be disabled with
// NDEBUG, but it's far better to just use the underlying allocator
// instead when no checking is desired.
// There is some evidence that this can confuse Purify.
template <class _Alloc>
class debug_alloc {

private:

  enum {_S_extra = 8};  // Size of space used to store size.  Note
                        // that this must be large enough to preserve
                        // alignment.

public:

  static void* allocate(size_t __n)
  {
    char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
    *(size_t*)__result = __n;
    return __result + (int) _S_extra;
  }

  static void deallocate(void* __p, size_t __n)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __n);
    _Alloc::deallocate(__real_p, __n + (int) _S_extra);
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __old_sz);
    char* __result = (char*)
      _Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
                                   __new_sz + (int) _S_extra);
    *(size_t*)__result = __new_sz;
    return __result + (int) _S_extra;
  }

};
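
// A usage sketch (illustrative only): debug_alloc wraps any SGI-style
// allocator and checks that deallocation sizes match allocation sizes:
//
//     typedef debug_alloc<malloc_alloc> _Checked_alloc;
//     void* __p = _Checked_alloc::allocate(100);
//     _Checked_alloc::deallocate(__p, 100);   // asserts if the size is wrong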

# ifdef __USE_MALLOC

typedef malloc_alloc alloc;
typedef malloc_alloc single_client_alloc;

# else

// Default node allocator.
// With a reasonable compiler, this should be roughly as fast as the
// original STL class-specific allocators, but with less fragmentation.
// Default_alloc_template parameters are experimental and MAY
// DISAPPEAR in the future.  Clients should just use alloc for now.
//
// Important implementation properties:
// 1. If the client requests an object of size > _MAX_BYTES, the resulting
//    object will be obtained directly from malloc.
// 2. In all other cases, we allocate an object of size exactly
//    _S_round_up(requested_size).  Thus the client has enough size
//    information that we can return the object to the proper free list
//    without permanently losing part of the object.
//

// The first template parameter specifies whether more than one thread
// may use this allocator.  It is safe to allocate an object from
// one instance of a default_alloc and deallocate it with another
// one.  This effectively transfers its ownership to the second one.
// This may have undesirable effects on reference locality.
// The second parameter is unreferenced and serves only to allow the
// creation of multiple default_alloc instances.
// Note that containers built on different allocator instances have
// different types, limiting the utility of this approach.

template <bool __threads, int __inst>
class __default_alloc_template {

private:
  // Really we should use static const int x = N
  // instead of enum { x = N }, but few compilers accept the former.
  enum {_ALIGN = 8};
  enum {_MAX_BYTES = 128};
  enum {_NFREELISTS = 16}; // _MAX_BYTES/_ALIGN
  static size_t
  _S_round_up(size_t __bytes)
    { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }

  union _Obj {
        union _Obj* _M_free_list_link;
        char _M_client_data[1];    /* The client sees this.        */
  };

  static _Obj* __STL_VOLATILE _S_free_list[];
        // Specifying a size results in duplicate def for 4.1
  static  size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (size_t)_ALIGN-1)/(size_t)_ALIGN - 1);
  }
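
  // For example, with _ALIGN == 8: _S_round_up(1) == 8,
  // _S_round_up(13) == 16, and _S_round_up(16) == 16; likewise
  // _S_freelist_index(13) == 1 (the 16-byte list) and
  // _S_freelist_index(128) == 15 (the last list).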

  // Returns an object of size __n, and optionally adds to size __n free list.
  static void* _S_refill(size_t __n);
  // Allocates a chunk for nobjs of size size.  nobjs may be reduced
  // if it is inconvenient to allocate the requested number.
  static char* _S_chunk_alloc(size_t __size, int& __nobjs);

  // Chunk allocation state.
  static char* _S_start_free;
  static char* _S_end_free;
  static size_t _S_heap_size;

# ifdef __STL_THREADS
    static _STL_mutex_lock _S_node_allocator_lock;
# endif

    // It would be nice to use _STL_auto_lock here.  But we
    // don't need the NULL check.  And we do need a test whether
    // threads have actually been started.
    class _Lock;
    friend class _Lock;
    class _Lock {
        public:
            _Lock() { __NODE_ALLOCATOR_LOCK; }
            ~_Lock() { __NODE_ALLOCATOR_UNLOCK; }
    };

public:

  /* __n must be > 0      */
  static void* allocate(size_t __n)
  {
    void* __ret = 0;

    if (__n > (size_t) _MAX_BYTES) {
      __ret = malloc_alloc::allocate(__n);
    }
    else {
      _Obj* __STL_VOLATILE* __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      // Acquire the lock here with a constructor call.
      // This ensures that it is released in exit or during stack
      // unwinding.
#     ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#     endif
      _Obj* __RESTRICT __result = *__my_free_list;
      if (__result == 0)
        __ret = _S_refill(_S_round_up(__n));
      else {
        *__my_free_list = __result -> _M_free_list_link;
        __ret = __result;
      }
    }

    return __ret;
  }

  /* __p may not be 0 */
  static void deallocate(void* __p, size_t __n)
  {
    if (__n > (size_t) _MAX_BYTES)
      malloc_alloc::deallocate(__p, __n);
    else {
      _Obj* __STL_VOLATILE*  __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      _Obj* __q = (_Obj*)__p;

      // acquire lock
#       ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#       endif /* _NOTHREADS */
      __q -> _M_free_list_link = *__my_free_list;
      *__my_free_list = __q;
      // lock is released here
    }
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz);

};

typedef __default_alloc_template<__NODE_ALLOCATOR_THREADS, 0> alloc;
typedef __default_alloc_template<false, 0> single_client_alloc;

template <bool __threads, int __inst>
inline bool operator==(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return true;
}

template <bool __threads, int __inst>
inline bool operator!=(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return false;
}


/* We allocate memory in large chunks in order to avoid fragmenting     */
/* the malloc heap too much.                                            */
/* We assume that size is properly aligned.                             */
/* We hold the allocation lock.                                         */
template <bool __threads, int __inst>
char*
__default_alloc_template<__threads, __inst>::_S_chunk_alloc(size_t __size,
                                                            int& __nobjs)
{
    char* __result;
    size_t __total_bytes = __size * __nobjs;
    size_t __bytes_left = _S_end_free - _S_start_free;

    if (__bytes_left >= __total_bytes) {
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else if (__bytes_left >= __size) {
        __nobjs = (int)(__bytes_left/__size);
        __total_bytes = __size * __nobjs;
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else {
        size_t __bytes_to_get =
          2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
        // Try to make use of the left-over piece.
        if (__bytes_left > 0) {
            _Obj* __STL_VOLATILE* __my_free_list =
                        _S_free_list + _S_freelist_index(__bytes_left);

            ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
            *__my_free_list = (_Obj*)_S_start_free;
        }
        _S_start_free = (char*)malloc(__bytes_to_get);
        if (0 == _S_start_free) {
            size_t __i;
            _Obj* __STL_VOLATILE* __my_free_list;
            _Obj* __p;
            // Try to make do with what we have.  That can't
            // hurt.  We do not try smaller requests, since that tends
            // to result in disaster on multi-process machines.
            for (__i = __size;
                 __i <= (size_t) _MAX_BYTES;
                 __i += (size_t) _ALIGN) {
                __my_free_list = _S_free_list + _S_freelist_index(__i);
                __p = *__my_free_list;
                if (0 != __p) {
                    *__my_free_list = __p -> _M_free_list_link;
                    _S_start_free = (char*)__p;
                    _S_end_free = _S_start_free + __i;
                    return(_S_chunk_alloc(__size, __nobjs));
                    // Any leftover piece will eventually make it to the
                    // right free list.
                }
            }
            _S_end_free = 0;    // In case of exception.
            _S_start_free = (char*)malloc_alloc::allocate(__bytes_to_get);
            // This should either throw an
            // exception or remedy the situation.  Thus we assume it
            // succeeded.
        }
        _S_heap_size += __bytes_to_get;
        _S_end_free = _S_start_free + __bytes_to_get;
        return(_S_chunk_alloc(__size, __nobjs));
    }
}
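
// A worked example (illustrative, not from the original source): starting
// from an empty pool (_S_heap_size == 0), _S_chunk_alloc(64, __nobjs == 20)
// finds no bytes left, so it mallocs __bytes_to_get = 2 * 1280 +
// _S_round_up(0 >> 4) = 2560 bytes.  The first 1280 bytes satisfy the 20
// objects; the remaining 1280 stay in [_S_start_free, _S_end_free) and
// service later requests without another malloc.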


/* Returns an object of size __n, and optionally adds to size __n free list.*/
/* We assume that __n is properly aligned.                                */
/* We hold the allocation lock.                                         */
template <bool __threads, int __inst>
void*
__default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
{
    int __nobjs = 20;
    char* __chunk = _S_chunk_alloc(__n, __nobjs);
    _Obj* __STL_VOLATILE* __my_free_list;
    _Obj* __result;
    _Obj* __current_obj;
    _Obj* __next_obj;
    int __i;

    if (1 == __nobjs) return(__chunk);
    __my_free_list = _S_free_list + _S_freelist_index(__n);

    /* Build free list in chunk */
      __result = (_Obj*)__chunk;
      *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
      for (__i = 1; ; __i++) {
        __current_obj = __next_obj;
        __next_obj = (_Obj*)((char*)__next_obj + __n);
        if (__nobjs - 1 == __i) {
            __current_obj -> _M_free_list_link = 0;
            break;
        } else {
            __current_obj -> _M_free_list_link = __next_obj;
        }
      }
    return(__result);
}
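
// For example (illustrative): the first allocate(64) on an empty free list
// calls _S_refill(64).  _S_chunk_alloc typically delivers 20 objects; the
// first is handed to the caller and the remaining 19 are chained onto
// _S_free_list[_S_freelist_index(64)], i.e. _S_free_list[7].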

template <bool __threads, int __inst>
void*
__default_alloc_template<__threads, __inst>::reallocate(void* __p,
                                                        size_t __old_sz,
                                                        size_t __new_sz)
{
    void* __result;
    size_t __copy_sz;

    if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES) {
        return(realloc(__p, __new_sz));
    }
    if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
    __result = allocate(__new_sz);
    __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
    memcpy(__result, __p, __copy_sz);
    deallocate(__p, __old_sz);
    return(__result);
}

#ifdef __STL_THREADS
    template <bool __threads, int __inst>
    _STL_mutex_lock
    __default_alloc_template<__threads, __inst>::_S_node_allocator_lock
        __STL_MUTEX_INITIALIZER;
#endif

template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __default_alloc_template<__threads, __inst>::_S_heap_size = 0;

template <bool __threads, int __inst>
typename __default_alloc_template<__threads, __inst>::_Obj* __STL_VOLATILE
__default_alloc_template<__threads, __inst>::_S_free_list[
    __default_alloc_template<__threads, __inst>::_NFREELISTS
] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy.  Otherwise it appears to allocate too little
// space for the array.

#endif /* ! __USE_MALLOC */

// This implements allocators as specified in the C++ standard.
//
// Note that standard-conforming allocators use many language features
// that are not yet widely implemented.  In particular, they rely on
// member templates, partial specialization, partial ordering of function
// templates, the typename keyword, and the use of the template keyword
// to refer to a template member of a dependent type.

template <class _Tp>
class allocator {
  typedef alloc _Alloc;          // The underlying allocator.
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };

  allocator() __STL_NOTHROW {}
  allocator(const allocator&) __STL_NOTHROW {}
  template <class _Tp1> allocator(const allocator<_Tp1>&) __STL_NOTHROW {}
  ~allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)))
                    : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
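
// A usage sketch (illustrative only):
//
//     allocator<int> __a;
//     int* __p = __a.allocate(3);     // raw space for 3 ints
//     __a.construct(__p, 42);         // placement-new 42 into *__p
//     __a.destroy(__p);
//     __a.deallocate(__p, 3);         // __n must match the allocation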

template<>
class allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };
};

template <class _T1, class _T2>
inline bool operator==(const allocator<_T1>&, const allocator<_T2>&)
{
  return true;
}

template <class _T1, class _T2>
inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
{
  return false;
}

// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator.   Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions.  Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.

template <class _Tp, class _Alloc>
struct __allocator {
  _Alloc __underlying_alloc;

  typedef size_t    size_type;
  typedef ptrdiff_t difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };

  __allocator() __STL_NOTHROW {}
  __allocator(const __allocator& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  ~__allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
        ? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
        : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
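
// A usage sketch (illustrative only): adapting an SGI-style allocator to
// the standard interface:
//
//     __allocator<int, malloc_alloc> __a;
//     int* __p = __a.allocate(4);
//     __a.deallocate(__p, 4);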

template <class _Alloc>
class __allocator<void, _Alloc> {
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
};

template <class _Tp, class _Alloc>
inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc == __a2.__underlying_alloc;
}

template <class _Tp, class _Alloc>
inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc != __a2.__underlying_alloc;
}

// Comparison operators for all of the predefined SGI-style allocators.
// This ensures that __allocator<_Tp, malloc_alloc> (for example) will
// work correctly.

template <int __inst>
inline bool operator==(const __malloc_alloc_template<__inst>&,
                       const __malloc_alloc_template<__inst>&)
{
  return true;
}

template <int __inst>
inline bool operator!=(const __malloc_alloc_template<__inst>&,
                       const __malloc_alloc_template<__inst>&)
{
  return false;
}

template <class _Alloc>
inline bool operator==(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return true;
}

template <class _Alloc>
inline bool operator!=(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return false;
}

// Another allocator adaptor: _Alloc_traits.  This serves two
// purposes.  First, make it possible to write containers that can use
// either SGI-style allocators or standard-conforming allocators.
// Second, provide a mechanism so that containers can query whether or
// not the allocator has distinct instances.  If not, the container
// can avoid wasting a word of memory to store an empty object.

// This adaptor uses partial specialization.  The general case of
// _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
// standard-conforming allocator, possibly with non-equal instances
// and non-static members.  (It still behaves correctly even if _Alloc
// has static members and if all instances are equal.  Refinements
// affect performance, not correctness.)

// There are always two members: allocator_type, which is a standard-
// conforming allocator type for allocating objects of type _Tp, and
// _S_instanceless, a static const member of type bool.  If
// _S_instanceless is true, this means that there is no difference
// between any two instances of type allocator_type.  Furthermore, if
// _S_instanceless is true, then _Alloc_traits has one additional
// member: _Alloc_type.  This type encapsulates allocation and
// deallocation of objects of type _Tp through a static interface; it
// has two member functions, whose signatures are
//    static _Tp* allocate(size_t)
//    static void deallocate(_Tp*, size_t)

// The fully general version.

template <class _Tp, class _Allocator>
struct _Alloc_traits
{
  static const bool _S_instanceless = false;
  typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
};

template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;

// The version for the default allocator.

template <class _Tp, class _Tp1>
struct _Alloc_traits<_Tp, allocator<_Tp1> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, alloc> _Alloc_type;
  typedef allocator<_Tp> allocator_type;
};
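
// A usage sketch (illustrative only): a container can test
// _S_instanceless and, when it is true, use the static _Alloc_type
// interface instead of storing an allocator object:
//
//     typedef _Alloc_traits<int, allocator<int> > _Traits;
//     // _Traits::_S_instanceless is true, so:
//     int* __p = _Traits::_Alloc_type::allocate(1);  // simple_alloc<int, alloc>
//     _Traits::_Alloc_type::deallocate(__p, 1);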

// Versions for the predefined SGI-style allocators.

template <class _Tp, int __inst>
struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};

#ifndef __USE_MALLOC
template <class _Tp, bool __threads, int __inst>
struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
          allocator_type;
};
#endif

template <class _Tp, class _Alloc>
struct _Alloc_traits<_Tp, debug_alloc<_Alloc> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};

// Versions for the __allocator adaptor used with the predefined
// SGI-style allocators.

template <class _Tp, class _Tp1, int __inst>
struct _Alloc_traits<_Tp,
                     __allocator<_Tp1, __malloc_alloc_template<__inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};

#ifndef __USE_MALLOC
template <class _Tp, class _Tp1, bool __thr, int __inst>
struct _Alloc_traits<_Tp,
                      __allocator<_Tp1,
                                  __default_alloc_template<__thr, __inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
          allocator_type;
};
#endif

template <class _Tp, class _Tp1, class _Alloc>
struct _Alloc_traits<_Tp, __allocator<_Tp1, debug_alloc<_Alloc> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};

} // namespace std

#endif /* __SGI_STL_INTERNAL_ALLOC_H */

// Local Variables:
// mode:C++
// End: