_alloc_old.h

// Adaptor that allocates arrays of _Tp through an SGI-style allocator _Alloc
// whose unit of allocation is _Alloc::value_type.  __chunk converts a request
// for __n objects of _Tp into the equivalent number of underlying units,
// rounding up.
template<class _Tp, class _Alloc>
class __simple_alloc {
  typedef _Alloc __alloc_type;
public:
  typedef typename _Alloc::value_type __alloc_value_type;
  typedef _Tp value_type;
  static size_t  __STL_CALL __chunk(size_t __n) {
    return (sizeof(__alloc_value_type)==sizeof(value_type)) ? __n :
            ((__n*sizeof(value_type)+sizeof(__alloc_value_type)-1)/sizeof(__alloc_value_type));
  }
  static _Tp*  __STL_CALL allocate(size_t __n) { return 0 == __n ? 0 : (_Tp*) __alloc_type::allocate(__chunk(__n)); }
  static void  __STL_CALL deallocate(_Tp * __p, size_t __n) {
    __alloc_type::deallocate((__alloc_value_type*)__p, __chunk(__n)); }
};
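// Usage sketch (illustrative; not part of the original header): the unit
// conversion performed by __chunk, reproduced with stand-alone, hypothetical
// names.  Assumes only that size_t is in scope, as it is elsewhere in this
// header.
template <class _RequestedT, class _UnderlyingT>
inline size_t __example_chunk(size_t __n) {
  // Units of _UnderlyingT needed to hold __n objects of _RequestedT, rounded up.
  return sizeof(_UnderlyingT) == sizeof(_RequestedT)
    ? __n
    : (__n * sizeof(_RequestedT) + sizeof(_UnderlyingT) - 1) / sizeof(_UnderlyingT);
}
// e.g. __example_chunk<double, char>(3) == 24 where sizeof(double) == 8:
// three doubles occupy 24 char-sized units of the underlying allocator.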

// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator.  Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions.  Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.

template <class _Tp, class _Alloc>
struct __allocator : public _Alloc {
  typedef _Alloc __underlying_alloc;

  typedef size_t    size_type;
  typedef ptrdiff_t difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

# if defined (__STL_MEMBER_TEMPLATE_CLASSES)
  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
# endif
  __allocator() __STL_NOTHROW {}
  __allocator(const _Alloc& ) __STL_NOTHROW {}
  __allocator(const __allocator<_Tp, _Alloc>& __a) __STL_NOTHROW
    : _Alloc(__a) {}
# if defined (__STL_MEMBER_TEMPLATES) && defined (__STL_FUNCTION_TMPL_PARTIAL_ORDER)
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) __STL_NOTHROW
    : _Alloc(__a) {}
# endif
# ifdef __STL_TRIVIAL_DESTRUCTOR_BUG
  ~__allocator() __STL_NOTHROW {}
# endif
  pointer address(reference __x) const { return &__x; }

# if !defined (__WATCOM_CPLUSPLUS__)
  const_pointer address(const_reference __x) const { return &__x; }
# endif

  // __n is permitted to be 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
        ? __STATIC_CAST(_Tp*,__underlying_alloc::allocate(__n * sizeof(_Tp)))
        : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { if (__p) __underlying_alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { __STLPORT_STD::construct(__p, __val); }
  void destroy(pointer __p) { __STLPORT_STD::destroy(__p); }

  const __underlying_alloc& __get_underlying_alloc() const { return *this; }
};
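// Usage sketch (illustrative; not part of the original header): the sequence a
// container performs through the standard-conforming interface defined above.
// _AllocT can be any instantiation of __allocator, e.g. __allocator<int, alloc>.
template <class _AllocT>
void __example_allocator_use(_AllocT __a) {
  typename _AllocT::pointer __p = __a.allocate(1);      // raw storage for one element
  __a.construct(__p, typename _AllocT::value_type());   // placement-construct a value
  __a.destroy(__p);                                     // run the destructor
  __a.deallocate(__p, 1);                               // return the storage
}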

#ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
template <class _Alloc>
class __allocator<void, _Alloc> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef __STL_MEMBER_TEMPLATE_CLASSES
  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
#endif
};
#endif

template <class _Tp, class _Alloc>
inline bool  __STL_CALL operator==(const __allocator<_Tp, _Alloc>& __a1,
                                   const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__get_underlying_alloc() == __a2.__get_underlying_alloc();
}

#ifdef __STL_USE_SEPARATE_RELOPS_NAMESPACE
template <class _Tp, class _Alloc>
inline bool  __STL_CALL operator!=(const __allocator<_Tp, _Alloc>& __a1,
                                   const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__get_underlying_alloc() != __a2.__get_underlying_alloc();
}
#endif /* __STL_USE_SEPARATE_RELOPS_NAMESPACE */


// Comparison operators for all of the predefined SGI-style allocators.
// This ensures that __allocator<malloc_alloc> (for example) will
// work correctly.  (A stand-alone sketch of this idiom follows the operators
// below.)

#ifndef __STL_NON_TYPE_TMPL_PARAM_BUG
template <int inst>
inline bool  __STL_CALL operator==(const __malloc_alloc<inst>&,
                                   const __malloc_alloc<inst>&)
{
  return true;
}

#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template <int __inst>
inline bool  __STL_CALL operator!=(const __malloc_alloc<__inst>&,
                                   const __malloc_alloc<__inst>&)
{
  return false;
}
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */

inline bool __STL_CALL operator==(const __new_alloc&, const __new_alloc&) { return true; }

# ifdef __STL_USE_SEPARATE_RELOPS_NAMESPACE
inline bool __STL_CALL operator!=(const __new_alloc&, const __new_alloc&) { return false; }
# endif


template <bool __threads, int __inst>
inline bool  __STL_CALL operator==(const __node_alloc<__threads, __inst>&,
                                   const __node_alloc<__threads, __inst>&)
{
  return true;
}

#if defined( __STL_FUNCTION_TMPL_PARTIAL_ORDER )

template <bool __threads, int __inst>
inline bool  __STL_CALL operator!=(const __node_alloc<__threads, __inst>&,
                                   const __node_alloc<__threads, __inst>&)
{
  return false;
}
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */

#endif /* __STL_NON_TYPE_TMPL_PARAM_BUG */

template <class _Alloc>
inline bool  __STL_CALL operator==(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&) {  return true; }
# ifdef __STL_USE_SEPARATE_RELOPS_NAMESPACE
template <class _Alloc>
inline bool  __STL_CALL operator!=(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&) {  return false; }
# endif
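// Illustration (not part of the original header): the operators above return a
// constant because the predefined SGI-style allocators carry no per-object
// state, so memory obtained from one instance may be released through another.
// A minimal, hypothetical stand-in showing the same idiom:
struct __example_stateless_alloc {
  static void* allocate(size_t __n)          { return ::operator new(__n); }
  static void  deallocate(void* __p, size_t) { ::operator delete(__p); }
};
inline bool operator==(const __example_stateless_alloc&,
                       const __example_stateless_alloc&) { return true; }
inline bool operator!=(const __example_stateless_alloc&,
                       const __example_stateless_alloc&) { return false; }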

#if defined (__STL_CLASS_PARTIAL_SPECIALIZATION)

// Versions for the predefined SGI-style allocators.
template <class _Tp, int __inst>
struct _Alloc_traits<_Tp, __malloc_alloc<__inst> > {
  typedef __allocator<_Tp, __malloc_alloc<__inst> > allocator_type;
};


template <class _Tp, bool __threads, int __inst>
struct _Alloc_traits<_Tp, __node_alloc<__threads, __inst> > {
  typedef __allocator<_Tp, __node_alloc<__threads, __inst> >
          allocator_type;
};

template <class _Tp, class _Alloc>
struct _Alloc_traits<_Tp, __debug_alloc<_Alloc> > {
  typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
};

// Versions for the __allocator adaptor used with the predefined
// SGI-style allocators.

template <class _Tp, class _Tp1, class _Alloc>
struct _Alloc_traits<_Tp, __allocator<_Tp1, _Alloc > > {
  typedef __allocator<_Tp, _Alloc > allocator_type;
};

#endif
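// Illustration (not part of the original header): how a container picks its
// real allocator through these specializations.  The container name is
// hypothetical; the _Alloc_traits primary template is declared elsewhere in
// the library.
template <class _Tp, class _Alloc>
class __example_container {
  // For a raw SGI-style allocator the trait names the adapted, standard-
  // conforming type: _Alloc_traits<int, __node_alloc<true, 0> >::allocator_type
  // is __allocator<int, __node_alloc<true, 0> >.
  typedef typename _Alloc_traits<_Tp, _Alloc>::allocator_type _Alloc_type;
};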

#if !defined (__STL_MEMBER_TEMPLATE_CLASSES)

// Versions for the predefined SGI-style allocators.


#  if defined (__STL_NON_TYPE_TMPL_PARAM_BUG)

typedef __malloc_alloc<0> __malloc_alloc_dfl;
typedef __node_alloc<false, 0> __single_client_node_alloc;
typedef __node_alloc<true, 0>  __multithreaded_node_alloc;

template <class _Tp>
inline __allocator<_Tp, __malloc_alloc_dfl >& __STL_CALL
__stl_alloc_rebind(__malloc_alloc_dfl& __a, const _Tp*) {
  return (__allocator<_Tp, __malloc_alloc_dfl >&)__a;
}

template <class _Tp>
inline __allocator<_Tp, __single_client_node_alloc >& __STL_CALL
__stl_alloc_rebind(__single_client_node_alloc& __a, const _Tp*) {
  return (__allocator<_Tp, __single_client_node_alloc >&)__a;
}

template <class _Tp>
inline __allocator<_Tp, __multithreaded_node_alloc >& __STL_CALL
__stl_alloc_rebind(__multithreaded_node_alloc& __a, const _Tp*) {
  return (__allocator<_Tp, __multithreaded_node_alloc >&)__a;
}

template <class _Tp>
inline __allocator<_Tp, __malloc_alloc_dfl > __STL_CALL
__stl_alloc_create(const __malloc_alloc_dfl&, const _Tp*) {
  return __allocator<_Tp, __malloc_alloc_dfl > ();
}

template <class _Tp>
inline __allocator<_Tp, __single_client_node_alloc > __STL_CALL
__stl_alloc_create(const __single_client_node_alloc&, const _Tp*) {
  return __allocator<_Tp, __single_client_node_alloc >();
}

template <class _Tp>
inline __allocator<_Tp, __multithreaded_node_alloc > __STL_CALL
__stl_alloc_create(const __multithreaded_node_alloc&, const _Tp*) {
  return __allocator<_Tp, __multithreaded_node_alloc >();
}

#  else

template <class _Tp, int __inst>
inline __allocator<_Tp, __malloc_alloc<__inst> >& __STL_CALL
__stl_alloc_rebind(__malloc_alloc<__inst>& __a, const _Tp*) {
  return (__allocator<_Tp, __malloc_alloc<__inst> >&)__a;
}

template <class _Tp, bool __threads, int __inst>
inline __allocator<_Tp, __node_alloc<__threads, __inst> >& __STL_CALL
__stl_alloc_rebind(__node_alloc<__threads, __inst>& __a, const _Tp*) {
  return (__allocator<_Tp, __node_alloc<__threads, __inst> >&)__a;
}

template <class _Tp, int __inst>
inline __allocator<_Tp, __malloc_alloc<__inst> > __STL_CALL
__stl_alloc_create(const __malloc_alloc<__inst>&, const _Tp*) {
  return __allocator<_Tp, __malloc_alloc<__inst> >();
}

template <class _Tp, bool __threads, int __inst>
inline __allocator<_Tp, __node_alloc<__threads, __inst> > __STL_CALL
__stl_alloc_create(const __node_alloc<__threads, __inst>&, const _Tp*) {
  return __allocator<_Tp, __node_alloc<__threads, __inst> >();
}

#  endif

template <class _Tp, class _Alloc>
inline __allocator<_Tp, __debug_alloc<_Alloc> > __STL_CALL
__stl_alloc_create(const __debug_alloc<_Alloc>&, const _Tp*) {
  return __allocator<_Tp, __debug_alloc<_Alloc> >();
}
template <class _Tp, class _Alloc>
inline __allocator<_Tp, __debug_alloc<_Alloc> >& __STL_CALL
__stl_alloc_rebind(__debug_alloc<_Alloc>& __a, const _Tp*) {
  return (__allocator<_Tp, __debug_alloc<_Alloc> >&)__a;
}

template <class _Tp>
inline __allocator<_Tp, __new_alloc > __STL_CALL
__stl_alloc_create(const __new_alloc&, const _Tp*) {
  return __allocator<_Tp, __new_alloc >();
}
template <class _Tp>
inline __allocator<_Tp, __new_alloc >&  __STL_CALL
__stl_alloc_rebind(__new_alloc& __a, const _Tp*) {
  return (__allocator<_Tp, __new_alloc >&)__a;
}

template <class _Tp1, class _Alloc, class _Tp2>
inline __allocator<_Tp2, _Alloc>& __STL_CALL
__stl_alloc_rebind(__allocator<_Tp1, _Alloc>& __a, const _Tp2*) {
  return (__allocator<_Tp2, _Alloc>&)__a;
}

template <class _Tp1, class _Alloc, class _Tp2>
inline __allocator<_Tp2, _Alloc> __STL_CALL
__stl_alloc_create(const __allocator<_Tp1, _Alloc>&, const _Tp2*) {
  return __allocator<_Tp2, _Alloc>();
}
#endif
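// Illustration (not part of the original header): __stl_alloc_rebind and
// __stl_alloc_create stand in for rebind<>::other on compilers without member
// template classes.  The helper below is hypothetical and shows the intended
// call pattern; the reference cast inside __stl_alloc_rebind is sound because
// every __allocator<_Tp, _Alloc> adds no data members to its base _Alloc.
#if !defined (__STL_MEMBER_TEMPLATE_CLASSES)
template <class _Tp, class _Alloc, class _Node>
inline _Node*
__example_allocate_node(__allocator<_Tp, _Alloc>& __a, const _Node* __dummy) {
  // Reinterpret __a as an allocator for _Node, then allocate one node.
  return __stl_alloc_rebind(__a, __dummy).allocate(1);
}
#endif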
