@@ -148,12 +148,56 @@ const CAllocator = struct {
    }
};

+/// Supports the full Allocator interface, including alignment, and exploiting
+/// `malloc_usable_size` if available. For an allocator that directly calls
+/// `malloc`/`free`, see `raw_c_allocator`.
pub const c_allocator = &c_allocator_state;
var c_allocator_state = Allocator{
    .allocFn = CAllocator.alloc,
    .resizeFn = CAllocator.resize,
};

+/// Asserts allocations are within `@alignOf(std.c.max_align_t)` and directly calls
+/// `malloc`/`free`. Does not attempt to utilize `malloc_usable_size`.
+/// This allocator is safe to use as the backing allocator with
+/// `ArenaAllocator` and `GeneralPurposeAllocator`, and is more optimal in these cases
+/// than using `c_allocator`.
+pub const raw_c_allocator = &raw_c_allocator_state;
+var raw_c_allocator_state = Allocator{
+    .allocFn = rawCAlloc,
+    .resizeFn = rawCResize,
+};
+
+fn rawCAlloc(
+    self: *Allocator,
+    len: usize,
+    ptr_align: u29,
+    len_align: u29,
+    ret_addr: usize,
+) Allocator.Error![]u8 {
+    assert(ptr_align <= @alignOf(std.c.max_align_t));
+    const ptr = @ptrCast([*]u8, c.malloc(len) orelse return error.OutOfMemory);
+    return ptr[0..len];
+}
+
+fn rawCResize(
+    self: *Allocator,
+    buf: []u8,
+    old_align: u29,
+    new_len: usize,
+    len_align: u29,
+    ret_addr: usize,
+) Allocator.Error!usize {
+    if (new_len == 0) {
+        c.free(buf.ptr);
+        return 0;
+    }
+    if (new_len <= buf.len) {
+        return mem.alignAllocLen(buf.len, new_len, len_align);
+    }
+    return error.OutOfMemory;
+}
+
/// This allocator makes a syscall directly for every allocation and free.
/// Thread-safe and lock-free.
pub const page_allocator = if (std.Target.current.isWasm())
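
The doc comment above points to the intended use of raw_c_allocator: serving as the backing allocator for a wrapping allocator such as ArenaAllocator, which batches its frees and never needs the backing allocator to resize in place. A minimal sketch of that pattern, assuming libc is linked and the pointer-based Allocator API used in this change, could look like:

    const std = @import("std");

    test "ArenaAllocator backed by raw_c_allocator (sketch)" {
        // Hedged example, not part of this commit: raw_c_allocator is assumed
        // to be a *Allocator, as declared above, and libc must be linked.
        var arena = std.heap.ArenaAllocator.init(std.heap.raw_c_allocator);
        defer arena.deinit();

        const allocator = &arena.allocator;
        const buf = try allocator.alloc(u8, 128);
        std.mem.set(u8, buf, 0); // touch the memory so the allocation is exercised
    }
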
@@ -804,6 +848,12 @@ test "c_allocator" {
    }
}

+test "raw_c_allocator" {
+    if (builtin.link_libc) {
+        try testAllocator(raw_c_allocator);
+    }
+}
+
test "WasmPageAllocator internals" {
    if (comptime std.Target.current.isWasm()) {
        const conventional_memsize = WasmPageAllocator.conventional.totalPages() * mem.page_size;
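
The new test runs raw_c_allocator through the shared testAllocator harness only when libc is linked. A smaller hedged sketch of exercising it directly through the Allocator interface, staying within the @alignOf(std.c.max_align_t) limit asserted in rawCAlloc, might look like:

    const std = @import("std");

    test "raw_c_allocator direct use (sketch)" {
        // Hedged example, not part of this commit; requires linking libc.
        // Alignments above @alignOf(std.c.max_align_t) would trip the assert
        // in rawCAlloc, so plain u8 allocations are used here.
        const a = std.heap.raw_c_allocator;
        const bytes = try a.alloc(u8, 64);
        defer a.free(bytes);
        std.mem.set(u8, bytes, 0xaa);
    }
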
@@ -958,6 +1008,7 @@ test "ThreadSafeFixedBufferAllocator" {
    try testAllocatorAlignedShrink(&fixed_buffer_allocator.allocator);
}

+/// This one should not try alignments that exceed what C malloc can handle.
pub fn testAllocator(base_allocator: *mem.Allocator) !void {
    var validationAllocator = mem.validationWrap(base_allocator);
    const allocator = &validationAllocator.allocator;