 //!
 //! ### `OptimizationMode.debug` and `OptimizationMode.release_safe`:
 //!
-//! * Detect double free, and print stack trace of:
+//! * Detect double free, and emit stack trace of:
 //!   - Where it was first allocated
 //!   - Where it was freed the first time
 //!   - Where it was freed the second time
 //!
-//! * Detect leaks and print stack trace of:
+//! * Detect leaks and emit stack trace of:
 //!   - Where it was allocated
 //!
 //! * When a page of memory is no longer needed, give it back to resident memory
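To make the documented behavior concrete, here is a minimal usage sketch (not part of the diff). It assumes the std API of this era, where the `Allocator` interface is reached through `&gpa.allocator` and `deinit()` returns whether leaks were found:

```zig
const std = @import("std");

pub fn main() !void {
    // In Debug and ReleaseSafe builds the allocator records a stack trace for
    // every allocation and free, which is what enables the reports above.
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer {
        // deinit() returns true if any leaks were detected (and logged).
        const leaked = gpa.deinit();
        if (leaked) std.debug.print("leaks were detected\n", .{});
    }
    const allocator = &gpa.allocator;

    const buf = try allocator.alloc(u8, 100);
    // Omitting this free would cause deinit() to report "Memory leak detected"
    // together with the allocation's stack trace.
    allocator.free(buf);
}
```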
@@ -178,15 +178,18 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
             stack_addresses: [stack_n]usize,
 
             fn dumpStackTrace(self: *LargeAlloc) void {
+                std.debug.dumpStackTrace(self.getStackTrace());
+            }
+
+            fn getStackTrace(self: *LargeAlloc) std.builtin.StackTrace {
                 var len: usize = 0;
                 while (len < stack_n and self.stack_addresses[len] != 0) {
                     len += 1;
                 }
-                const stack_trace = StackTrace{
+                return .{
                     .instruction_addresses = &self.stack_addresses,
                     .index = len,
                 };
-                std.debug.dumpStackTrace(stack_trace);
             }
         };
 
         const LargeAllocTable = std.AutoHashMapUnmanaged(usize, LargeAlloc);
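The refactor works because each recorded trace lives in a fixed `[stack_n]usize` buffer whose unused slots stay zero, so the trace length can be recovered by scanning for the first zero. A standalone sketch of that capture/reconstruct round trip, using a hypothetical 8-entry buffer rather than `stack_n`:

```zig
const std = @import("std");

// Capture the current call stack into a zero-initialized fixed-size buffer.
// Slots that captureStackTrace does not fill remain 0 and act as a sentinel.
fn capture(buf: *[8]usize) void {
    std.mem.set(usize, buf, 0);
    var trace = std.builtin.StackTrace{
        .instruction_addresses = buf,
        .index = 0,
    };
    std.debug.captureStackTrace(null, &trace);
}

// Rebuild a StackTrace later by counting the leading non-zero slots,
// mirroring what getStackTrace does above.
fn reconstruct(buf: *[8]usize) std.builtin.StackTrace {
    var len: usize = 0;
    while (len < buf.len and buf[len] != 0) : (len += 1) {}
    return .{
        .instruction_addresses = buf,
        .index = len,
    };
}
```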
@@ -282,15 +285,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
                     while (true) : (bit_index += 1) {
                         const is_used = @truncate(u1, used_byte >> bit_index) != 0;
                         if (is_used) {
-                            std.debug.print("\nMemory leak detected:\n", .{});
                             const slot_index = @intCast(SlotIndex, used_bits_byte * 8 + bit_index);
-                            const stack_trace = bucketStackTrace(
-                                bucket,
-                                size_class,
-                                slot_index,
-                                .alloc,
-                            );
-                            std.debug.dumpStackTrace(stack_trace);
+                            const stack_trace = bucketStackTrace(bucket, size_class, slot_index, .alloc);
+                            std.log.err(.std, "Memory leak detected: {}", .{stack_trace});
                             leaks = true;
                         }
                         if (bit_index == math.maxInt(u3))
@@ -301,8 +298,8 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
             return leaks;
         }
 
-        /// Returns whether there were leaks.
-        pub fn deinit(self: *Self) bool {
+        /// Emits log messages for leaks and then returns whether there were any leaks.
+        pub fn detectLeaks(self: *Self) bool {
             var leaks = false;
             for (self.buckets) |optional_bucket, bucket_i| {
                 const first_bucket = optional_bucket orelse continue;
@@ -317,10 +314,14 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
                 }
             }
             for (self.large_allocations.items()) |*large_alloc| {
-                std.debug.print("\nMemory leak detected (0x{x}):\n", .{@ptrToInt(large_alloc.value.bytes.ptr)});
-                large_alloc.value.dumpStackTrace();
+                std.log.err(.std, "Memory leak detected: {}", .{large_alloc.value.getStackTrace()});
                 leaks = true;
             }
+            return leaks;
+        }
+
+        pub fn deinit(self: *Self) bool {
+            const leaks = if (config.safety) self.detectLeaks() else false;
             self.large_allocations.deinit(self.backing_allocator);
             self.* = undefined;
             return leaks;
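Splitting `detectLeaks` out of `deinit` also makes it possible to log outstanding allocations without tearing the allocator down. A hedged sketch of how a caller might use the two entry points (the surrounding functions are hypothetical):

```zig
const std = @import("std");

var gpa = std.heap.GeneralPurposeAllocator(.{}){};

fn checkpoint() void {
    // Logs every allocation that is still live at this point; outside of
    // shutdown these are not necessarily leaks, just outstanding allocations.
    _ = gpa.detectLeaks();
}

fn shutdown() void {
    // deinit() still runs leak detection when config.safety is enabled,
    // then frees the allocator's own bookkeeping.
    _ = gpa.deinit();
}
```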
@@ -442,13 +443,18 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
             };
 
             if (config.safety and old_mem.len != entry.value.bytes.len) {
-                std.debug.print("\nAllocation size {} bytes does not match free size {}. Allocated here:\n", .{
+                var addresses: [stack_n]usize = [1]usize{0} ** stack_n;
+                var free_stack_trace = StackTrace{
+                    .instruction_addresses = &addresses,
+                    .index = 0,
+                };
+                std.debug.captureStackTrace(ret_addr, &free_stack_trace);
+                std.log.err(.std, "Allocation size {} bytes does not match free size {}. Allocation: {} Free: {}", .{
                     entry.value.bytes.len,
                     old_mem.len,
+                    entry.value.getStackTrace(),
+                    free_stack_trace,
                 });
-                entry.value.dumpStackTrace();
-
-                @panic("\nFree here:");
             }
 
             const result_len = try self.backing_allocator.resizeFn(self.backing_allocator, old_mem, old_align, new_size, len_align, ret_addr);
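The check above fires when the slice handed back to the allocator has a different length than the slice that was originally returned. A deliberately buggy sketch of the caller error it now reports, assuming a 4096-byte request is large enough to bypass the size-class buckets on the target:

```zig
const std = @import("std");

fn sliceLengthBug(allocator: *std.mem.Allocator) !void {
    const buf = try allocator.alloc(u8, 4096); // recorded as a large allocation
    // Passing back a shorter slice is a caller bug: the recorded size (4096)
    // does not match the free size (100), so the allocator logs both the
    // allocation site and the free site.
    allocator.free(buf[0..100]);
}
```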
@@ -518,14 +524,24 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
                 const is_used = @truncate(u1, used_byte.* >> used_bit_index) != 0;
                 if (!is_used) {
                     if (config.safety) {
-                        // print allocation stack trace
-                        std.debug.print("\nDouble free detected, allocated here:\n", .{});
                         const alloc_stack_trace = bucketStackTrace(bucket, size_class, slot_index, .alloc);
-                        std.debug.dumpStackTrace(alloc_stack_trace);
-                        std.debug.print("\nFirst free here:\n", .{});
                         const free_stack_trace = bucketStackTrace(bucket, size_class, slot_index, .free);
-                        std.debug.dumpStackTrace(free_stack_trace);
-                        @panic("\nSecond free here:");
+                        var addresses: [stack_n]usize = [1]usize{0} ** stack_n;
+                        var second_free_stack_trace = StackTrace{
+                            .instruction_addresses = &addresses,
+                            .index = 0,
+                        };
+                        std.debug.captureStackTrace(ret_addr, &second_free_stack_trace);
+                        std.log.err(.std, "Double free detected. Allocation: {} First free: {} Second free: {}", .{
+                            alloc_stack_trace,
+                            free_stack_trace,
+                            second_free_stack_trace,
+                        });
+                        if (new_size == 0) {
+                            // Recoverable.
+                            return @as(usize, 0);
+                        }
+                        @panic("Unrecoverable double free");
                     } else {
                         unreachable;
                     }
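With this change a plain double free (a shrink to zero) is logged but recoverable in safe builds, while a double free reached through a non-zero resize still panics. A deliberately buggy sketch of the recoverable case, assuming `destroy` reaches the allocator as a resize to zero as in this era's `Allocator` interface:

```zig
const std = @import("std");

fn doubleFreeBug(allocator: *std.mem.Allocator) !void {
    const item = try allocator.create(u32);
    allocator.destroy(item);
    // Second free of the same slot: in Debug and ReleaseSafe the allocator
    // logs "Double free detected" with the allocation, first-free, and
    // second-free stack traces and then returns instead of panicking.
    allocator.destroy(item);
}
```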