/**
 * D binding to C++ std::allocator.
 *
 * Copyright: Copyright (c) 2019 D Language Foundation
 * License: Distributed under the
 *      $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost Software License 1.0).
 *    (See accompanying file LICENSE)
 * Authors:   Manu Evans
 * Source:    $(DRUNTIMESRC core/stdcpp/allocator.d)
 */

module core.stdcpp.allocator;

// LDC: empty module for unsupported C++ runtimes
version (CppRuntime_Microsoft) version = Supported;
else version (CppRuntime_Gcc) version = Supported;
else version (CppRuntime_Clang) version = Supported;
version (Supported):

import core.stdcpp.new_;
import core.stdcpp.xutility : StdNamespace, __cpp_sized_deallocation, __cpp_aligned_new;

extern(C++, (StdNamespace)):

/**
 * Allocators are classes that define memory models to be used by some parts of
 * the C++ Standard Library, most notably by the standard containers.
 */
extern(C++, class)
struct allocator(T)
{
    static assert(!is(T == const), "The C++ Standard forbids containers of const elements because allocator!(const T) is ill-formed.");
    static assert(!is(T == immutable), "immutable is not representable in C++");
    static assert(!is(T == class), "Instantiation with `class` is not supported; D can't mangle the base (non-pointer) type of a class. Use `extern (C++, class) struct T { ... }` instead.");
    extern(D):

    ///
    this(U)(ref allocator!U) {}

    ///
    alias size_type = size_t;
    ///
    alias difference_type = ptrdiff_t;
    ///
    alias pointer = T*;
    ///
    alias value_type = T;

    ///
    enum propagate_on_container_move_assignment = true;
    ///
    enum is_always_equal = true;

    ///
    alias rebind(U) = allocator!U;

    version (CppRuntime_Microsoft)
    {
        import core.stdcpp.xutility : _MSC_VER;

        ///
        T* allocate(size_t count) @nogc
        {
            static if (_MSC_VER <= 1800)
            {
                import core.stdcpp.xutility : _Xbad_alloc;
                if (count == 0)
                    return null;
                void* mem;
                if ((size_t.max / T.sizeof < count) || (mem = __cpp_new(count * T.sizeof)) is null)
                    _Xbad_alloc();
                return cast(T*)mem;
            }
            else
            {
                enum _Align = _New_alignof!T;

                static size_t _Get_size_of_n(T)(const size_t _Count)
                {
                    static if (T.sizeof == 1)
                        return _Count;
                    else
                    {
                        enum size_t _Max_possible = size_t.max / T.sizeof;
                        return _Max_possible < _Count ? size_t.max : _Count * T.sizeof;
                    }
                }

                const size_t _Bytes = _Get_size_of_n!T(count);
                if (_Bytes == 0)
                    return null;

                static if (!__cpp_aligned_new || _Align <= __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                {
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            return cast(T*)_Allocate_manually_vector_aligned(_Bytes);
                    }
                    return cast(T*)__cpp_new(_Bytes);
                }
                else
                {
                    size_t _Passed_align = _Align;
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Passed_align = _Align < _Big_allocation_alignment ? _Big_allocation_alignment : _Align;
                    }
                    return cast(T*)__cpp_new_aligned(_Bytes, cast(align_val_t)_Passed_align);
                }
            }
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            static if (_MSC_VER <= 1800)
            {
                __cpp_delete(ptr);
            }
            else
            {
                // matches the behaviour observed in VS2017
                void* _Ptr = ptr;
                size_t _Bytes = T.sizeof * count;

                enum _Align = _New_alignof!T;
                static if (!__cpp_aligned_new || _Align <= __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                {
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Adjust_manually_vector_aligned(_Ptr, _Bytes);
                    }
                    static if (_MSC_VER <= 1900)
                        __cpp_delete(ptr);
                    else
                        __cpp_delete_size(_Ptr, _Bytes);
                }
                else
                {
                    size_t _Passed_align = _Align;
                    version (INTEL_ARCH)
                    {
                        if (_Bytes >= _Big_allocation_threshold)
                            _Passed_align = _Align < _Big_allocation_alignment ? _Big_allocation_alignment : _Align;
                    }
                    __cpp_delete_size_aligned(_Ptr, _Bytes, cast(align_val_t)_Passed_align);
                }
            }
        }

        ///
        enum size_t max_size = size_t.max / T.sizeof;
    }
    else version (CppRuntime_Gcc)
    {
        ///
        T* allocate(size_t count, const(void)* = null) @nogc
        {
            // if (count > max_size)
            //     std::__throw_bad_alloc();

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                return cast(T*)__cpp_new_aligned(count * T.sizeof, cast(align_val_t)T.alignof);
            else
                return cast(T*)__cpp_new(count * T.sizeof);
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            // NOTE: GCC doesn't seem to use the sized delete even when it's available...

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                __cpp_delete_aligned(cast(void*)ptr, cast(align_val_t)T.alignof);
            else
                __cpp_delete(cast(void*)ptr);
        }

        ///
        enum size_t max_size = (ptrdiff_t.max < size_t.max ? cast(size_t)ptrdiff_t.max : size_t.max) / T.sizeof;
    }
    else version (CppRuntime_Clang)
    {
        ///
        T* allocate(size_t count, const(void)* = null) @nogc
        {
            // if (count > max_size)
            //     __throw_length_error("allocator!T.allocate(size_t n) 'n' exceeds maximum supported size");

            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
                return cast(T*)__cpp_new_aligned(count * T.sizeof, cast(align_val_t)T.alignof);
            else
                return cast(T*)__cpp_new(count * T.sizeof);
        }
        ///
        void deallocate(T* ptr, size_t count) @nogc
        {
            static if (__cpp_aligned_new && T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__)
            {
                static if (__cpp_sized_deallocation)
                    return __cpp_delete_size_aligned(cast(void*)ptr, count * T.sizeof, cast(align_val_t)T.alignof);
                else
                    return __cpp_delete_aligned(cast(void*)ptr, cast(align_val_t)T.alignof);
            }
            else static if (__cpp_sized_deallocation)
                return __cpp_delete_size(cast(void*)ptr, count * T.sizeof);
            else
                return __cpp_delete(cast(void*)ptr);
        }

        ///
        enum size_t max_size = size_t.max / T.sizeof;
    }
    else
    {
        static assert(false, "C++ runtime not supported");
    }
}
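
// A minimal usage sketch of the binding above. It is guarded out by default
// because instantiating `allocator` emits references to the C++ runtime's
// `operator new`/`operator delete` (via `__cpp_new`/`__cpp_delete`), so running
// it assumes the target C++ standard library is linked in.
version (none) @nogc unittest
{
    allocator!int alloc;

    // allocate storage for 16 ints through the C++ runtime
    int* p = alloc.allocate(16);
    p[0] = 42;

    // the count passed to deallocate must match the one passed to allocate
    alloc.deallocate(p, 16);
}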

///
extern(C++, (StdNamespace))
struct allocator_traits(Alloc)
{
    import core.internal.traits : isTrue;

    ///
    alias allocator_type = Alloc;
    ///
    alias value_type = allocator_type.value_type;
    ///
    alias size_type = allocator_type.size_type;
    ///
    alias difference_type = allocator_type.difference_type;
    ///
    alias pointer = allocator_type.pointer;

    ///
    enum propagate_on_container_copy_assignment = isTrue!(allocator_type, "propagate_on_container_copy_assignment");
    ///
    enum propagate_on_container_move_assignment = isTrue!(allocator_type, "propagate_on_container_move_assignment");
    ///
    enum propagate_on_container_swap = isTrue!(allocator_type, "propagate_on_container_swap");
    ///
    enum is_always_equal = isTrue!(allocator_type, "is_always_equal");

    ///
    template rebind_alloc(U)
    {
        static if (__traits(hasMember, allocator_type, "rebind"))
            alias rebind_alloc = allocator_type.rebind!U;
        else
            alias rebind_alloc = allocator_type!U;
    }
    ///
    alias rebind_traits(U) = allocator_traits!(rebind_alloc!U);

    ///
    static size_type max_size()(auto ref allocator_type a)
    {
        static if (__traits(hasMember, allocator_type, "max_size"))
            return a.max_size();
        else
            return size_type.max / value_type.sizeof;
    }

    ///
    static allocator_type select_on_container_copy_construction()(auto ref allocator_type a)
    {
        static if (__traits(hasMember, allocator_type, "select_on_container_copy_construction"))
            return a.select_on_container_copy_construction();
        else
            return a;
    }
}
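
// A compile-time sketch of how `allocator_traits` reflects members of the
// allocator it wraps; everything is checked with `static assert`. Guarded out
// by default for the same reason as above: instantiating `allocator!int`
// assumes the C++ standard library is available at link time.
version (none) unittest
{
    alias traits = allocator_traits!(allocator!int);

    static assert(is(traits.value_type == int));
    static assert(is(traits.pointer == int*));
    static assert(is(traits.rebind_alloc!double == allocator!double));

    // picked up from `allocator`'s own members...
    static assert(traits.propagate_on_container_move_assignment);
    static assert(traits.is_always_equal);
    // ...and defaulted to false when the allocator doesn't define them
    static assert(!traits.propagate_on_container_copy_assignment);
}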

private:

// MSVC has some bonus complexity!
version (CppRuntime_Microsoft)
{
    // some versions of VS require a `* const` pointer mangling hack
    // we need a way to supply the target VS version to the compiler
    version = NeedsMangleHack;

    version (X86)
        version = INTEL_ARCH;
    version (X86_64)
        version = INTEL_ARCH;

    // HACK: should we guess _DEBUG for `debug` builds?
    version (_DEBUG)
        enum _DEBUG = true;
    else version (NDEBUG)
        enum _DEBUG = false;
    else
    {
        import core.stdcpp.xutility : __CXXLIB__;
        enum _DEBUG = __CXXLIB__.length && 'd' == __CXXLIB__[$-1]; // libcmtd, msvcrtd
    }

    enum _New_alignof(T) = T.alignof > __STDCPP_DEFAULT_NEW_ALIGNMENT__ ? T.alignof : __STDCPP_DEFAULT_NEW_ALIGNMENT__;
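
    // For example (assuming the usual 8- or 16-byte default new alignment):
    //   static assert(_New_alignof!int == __STDCPP_DEFAULT_NEW_ALIGNMENT__);
    //   align(64) struct Overaligned { ubyte[64] bytes; }
    //   static assert(_New_alignof!Overaligned == 64);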

    version (INTEL_ARCH)
    {
        enum size_t _Big_allocation_threshold = 4096;
        enum size_t _Big_allocation_alignment = 32;

        static assert(2 * (void*).sizeof <= _Big_allocation_alignment, "Big allocation alignment should at least match vector register alignment");
        static assert((v => v != 0 && (v & (v - 1)) == 0)(_Big_allocation_alignment), "Big allocation alignment must be a power of two");
        static assert(size_t.sizeof == (void*).sizeof, "uintptr_t is not the same size as size_t");

        // NOTE: this must track the `_DEBUG` macro used in C++...
        static if (_DEBUG)
            enum size_t _Non_user_size = 2 * (void*).sizeof + _Big_allocation_alignment - 1;
        else
            enum size_t _Non_user_size = (void*).sizeof + _Big_allocation_alignment - 1;

        version (Win64)
            enum size_t _Big_allocation_sentinel = 0xFAFAFAFAFAFAFAFA;
        else
            enum size_t _Big_allocation_sentinel = 0xFAFAFAFA;

        extern(D) // Template so it gets compiled according to _DEBUG.
        void* _Allocate_manually_vector_aligned()(const size_t _Bytes) @nogc
        {
            size_t _Block_size = _Non_user_size + _Bytes;
            if (_Block_size <= _Bytes)
                _Block_size = size_t.max;

            const size_t _Ptr_container = cast(size_t)__cpp_new(_Block_size);
            if (!(_Ptr_container != 0))
                assert(false, "invalid argument");
            void* _Ptr = cast(void*)((_Ptr_container + _Non_user_size) & ~(_Big_allocation_alignment - 1));
            (cast(size_t*)_Ptr)[-1] = _Ptr_container;

            static if (_DEBUG)
                (cast(size_t*)_Ptr)[-2] = _Big_allocation_sentinel;
            return (_Ptr);
        }

        extern(D) // Template so it gets compiled according to _DEBUG.
        void _Adjust_manually_vector_aligned()(ref void* _Ptr, ref size_t _Bytes) pure nothrow @nogc
        {
            _Bytes += _Non_user_size;

            const size_t* _Ptr_user = cast(size_t*)_Ptr;
            const size_t _Ptr_container = _Ptr_user[-1];

            // If the following asserts, it likely means that we are performing
            // an aligned delete on memory coming from an unaligned allocation.
            static if (_DEBUG)
                assert(_Ptr_user[-2] == _Big_allocation_sentinel, "invalid argument");

            // Extra paranoia on aligned allocation/deallocation; ensure _Ptr_container is
            // in range [_Min_back_shift, _Non_user_size]
            static if (_DEBUG)
                enum size_t _Min_back_shift = 2 * (void*).sizeof;
            else
                enum size_t _Min_back_shift = (void*).sizeof;

            const size_t _Back_shift = cast(size_t)_Ptr - _Ptr_container;
            if (!(_Back_shift >= _Min_back_shift && _Back_shift <= _Non_user_size))
                assert(false, "invalid argument");
            _Ptr = cast(void*)_Ptr_container;
        }
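
        // A round-trip sketch of the two helpers above (guarded out by default;
        // running it assumes the MSVC C++ runtime is linked for `__cpp_new` and
        // `__cpp_delete_size`): `_Allocate_manually_vector_aligned` over-allocates,
        // aligns the user pointer to `_Big_allocation_alignment` and stashes the
        // raw block address just below it; `_Adjust_manually_vector_aligned`
        // recovers that raw address and size before the block is freed.
        version (none) @nogc unittest
        {
            size_t bytes = 8192; // comfortably above _Big_allocation_threshold

            void* p = _Allocate_manually_vector_aligned(bytes);
            assert((cast(size_t)p & (_Big_allocation_alignment - 1)) == 0);

            // recover the original block pointer and size, then release it
            _Adjust_manually_vector_aligned(p, bytes);
            __cpp_delete_size(p, bytes);
        }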
    }
}
version (CppRuntime_Clang)
{
    // Helper for container swap
    package(core.stdcpp) void __swap_allocator(Alloc)(ref Alloc __a1, ref Alloc __a2)
    {
        import core.internal.lifetime : swap;

        static if (allocator_traits!Alloc.propagate_on_container_swap)
            swap(__a1, __a2);
    }
}