@@ -321,51 +321,57 @@ pub const FixedBufferAllocator = struct {
     fn free(allocator: *Allocator, bytes: []u8) void {}
 };
 
-/// lock free
-pub const ThreadSafeFixedBufferAllocator = struct {
-    allocator: Allocator,
-    end_index: usize,
-    buffer: []u8,
+pub const ThreadSafeFixedBufferAllocator = blk: {
+    if (builtin.single_threaded) {
+        break :blk FixedBufferAllocator;
+    } else {
+        /// lock free
+        break :blk struct {
+            allocator: Allocator,
+            end_index: usize,
+            buffer: []u8,
+
+            pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
+                return ThreadSafeFixedBufferAllocator{
+                    .allocator = Allocator{
+                        .allocFn = alloc,
+                        .reallocFn = realloc,
+                        .freeFn = free,
+                    },
+                    .buffer = buffer,
+                    .end_index = 0,
+                };
+            }
 
-    pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
-        return ThreadSafeFixedBufferAllocator{
-            .allocator = Allocator{
-                .allocFn = alloc,
-                .reallocFn = realloc,
-                .freeFn = free,
-            },
-            .buffer = buffer,
-            .end_index = 0,
-        };
-    }
+            fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
+                const self = @fieldParentPtr(ThreadSafeFixedBufferAllocator, "allocator", allocator);
+                var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
+                while (true) {
+                    const addr = @ptrToInt(self.buffer.ptr) + end_index;
+                    const rem = @rem(addr, alignment);
+                    const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
+                    const adjusted_index = end_index + march_forward_bytes;
+                    const new_end_index = adjusted_index + n;
+                    if (new_end_index > self.buffer.len) {
+                        return error.OutOfMemory;
+                    }
+                    end_index = @cmpxchgWeak(usize, &self.end_index, end_index, new_end_index, builtin.AtomicOrder.SeqCst, builtin.AtomicOrder.SeqCst) orelse return self.buffer[adjusted_index..new_end_index];
+                }
+            }
 
-    fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
-        const self = @fieldParentPtr(ThreadSafeFixedBufferAllocator, "allocator", allocator);
-        var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
-        while (true) {
-            const addr = @ptrToInt(self.buffer.ptr) + end_index;
-            const rem = @rem(addr, alignment);
-            const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
-            const adjusted_index = end_index + march_forward_bytes;
-            const new_end_index = adjusted_index + n;
-            if (new_end_index > self.buffer.len) {
-                return error.OutOfMemory;
+            fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
+                if (new_size <= old_mem.len) {
+                    return old_mem[0..new_size];
+                } else {
+                    const result = try alloc(allocator, new_size, alignment);
+                    mem.copy(u8, result, old_mem);
+                    return result;
+                }
             }
-            end_index = @cmpxchgWeak(usize, &self.end_index, end_index, new_end_index, builtin.AtomicOrder.SeqCst, builtin.AtomicOrder.SeqCst) orelse return self.buffer[adjusted_index..new_end_index];
-        }
-    }
 
-    fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
-        if (new_size <= old_mem.len) {
-            return old_mem[0..new_size];
-        } else {
-            const result = try alloc(allocator, new_size, alignment);
-            mem.copy(u8, result, old_mem);
-            return result;
-        }
+            fn free(allocator: *Allocator, bytes: []u8) void {}
+        };
     }
-
-    fn free(allocator: *Allocator, bytes: []u8) void {}
 };
 
 pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) StackFallbackAllocator(size) {
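With this change the allocator is chosen at compile time: a build with builtin.single_threaded resolves ThreadSafeFixedBufferAllocator to the plain FixedBufferAllocator and pays nothing for atomics, while threaded builds get the lock-free @cmpxchgWeak bump loop above. A minimal usage sketch, assuming the std.heap API of this era of Zig (the buffer size and test name are illustrative, not taken from the commit):

const std = @import("std");

test "fixed buffer bump allocation" {
    var buf: [1024]u8 = undefined;
    // Resolves to FixedBufferAllocator in single-threaded builds,
    // otherwise to the lock-free struct introduced in this commit.
    var fba = std.heap.ThreadSafeFixedBufferAllocator.init(buf[0..]);
    const a = try fba.allocator.alloc(u8, 64); // advances end_index by at least 64
    const b = try fba.allocator.alloc(u8, 64); // a second, non-overlapping slice
    std.debug.assert(a.len == 64 and b.len == 64);
}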