
Commit e57c249

std.heap.ArenaAllocator: make State.promote safer
Previously, std.heap.ArenaAllocator.State.promote required the user to manually copy the arena state back to the original pointer when done with it, and the original state could not be used elsewhere until that copy happened. This was a clear footgun: the detail is easy to miss, and forgetting it would likely cause strange memory-corruption issues that are hard to track down.

Now, promote returns a new structure holding a pointer to the original state, and that pointer is updated whenever an operation is performed through its allocator, removing the footgun. To keep the API convenient, the returned Promoted struct is only ever referenced through const pointers, since the struct itself never needs to be mutated; this allows the direct usage 'state.promote().allocator()' to work safely.

Previously, the std.mem.Allocator API offered no way to do this without resorting to hacks like @ptrToInt, because the type-erased implementation pointer was stored internally as a *anyopaque. The definition of Allocator has therefore also been modified to allow storing const pointers through a union. This required a few changes across std, but nothing should be hugely breaking.

Resolves: ziglang#13409
1 parent 72d3f4b commit e57c249
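To make the difference concrete, here is a minimal sketch of the two call-site patterns, based on the src/Module.zig change at the bottom of this diff (`gpa` and `decl.value_arena` are simply whatever child allocator and stored State the caller already has); the two variants are alternatives, not sequential code:

// Before: promote copied the State into a fresh ArenaAllocator, so the caller had
// to remember to write the mutated state back, typically with a defer.
var decl_arena = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = decl_arena.state; // forgetting this line risks memory corruption
const decl_arena_allocator = decl_arena.allocator();

// After: promote takes a *State and returns a Promoted that keeps that pointer,
// so every alloc/resize/free updates the original State automatically.
const decl_arena_allocator = decl.value_arena.?.promote(gpa).allocator();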

File tree

7 files changed: +110 -62 lines changed


lib/std/heap.zig

Lines changed: 12 additions & 12 deletions
@@ -97,7 +97,7 @@ const CAllocator = struct {
     }
 
     fn alloc(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         len: usize,
         alignment: u29,
         len_align: u29,
@@ -123,7 +123,7 @@ const CAllocator = struct {
     }
 
     fn resize(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         new_len: usize,
@@ -145,7 +145,7 @@ const CAllocator = struct {
     }
 
     fn free(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         return_address: usize,
@@ -185,7 +185,7 @@ const raw_c_allocator_vtable = Allocator.VTable{
 };
 
 fn rawCAlloc(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     len: usize,
     ptr_align: u29,
     len_align: u29,
@@ -199,7 +199,7 @@ fn rawCAlloc(
 }
 
 fn rawCResize(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     buf: []u8,
     old_align: u29,
     new_len: usize,
@@ -215,7 +215,7 @@ fn rawCResize(
 }
 
 fn rawCFree(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     buf: []u8,
     old_align: u29,
     ret_addr: usize,
@@ -257,7 +257,7 @@ const PageAllocator = struct {
         .free = free,
     };
 
-    fn alloc(_: *anyopaque, n: usize, alignment: u29, len_align: u29, ra: usize) error{OutOfMemory}![]u8 {
+    fn alloc(_: Allocator.ImplPtr, n: usize, alignment: u29, len_align: u29, ra: usize) error{OutOfMemory}![]u8 {
         _ = ra;
         assert(n > 0);
         if (n > maxInt(usize) - (mem.page_size - 1)) {
@@ -358,7 +358,7 @@ const PageAllocator = struct {
     }
 
     fn resize(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf_unaligned: []u8,
         buf_align: u29,
         new_size: usize,
@@ -409,7 +409,7 @@ const PageAllocator = struct {
         return null;
     }
 
-    fn free(_: *anyopaque, buf_unaligned: []u8, buf_align: u29, return_address: usize) void {
+    fn free(_: Allocator.ImplPtr, buf_unaligned: []u8, buf_align: u29, return_address: usize) void {
         _ = buf_align;
         _ = return_address;
 
@@ -521,7 +521,7 @@ const WasmPageAllocator = struct {
         return mem.alignForward(memsize, mem.page_size) / mem.page_size;
     }
 
-    fn alloc(_: *anyopaque, len: usize, alignment: u29, len_align: u29, ra: usize) error{OutOfMemory}![]u8 {
+    fn alloc(_: Allocator.ImplPtr, len: usize, alignment: u29, len_align: u29, ra: usize) error{OutOfMemory}![]u8 {
         _ = ra;
         if (len > maxInt(usize) - (mem.page_size - 1)) {
             return error.OutOfMemory;
@@ -579,7 +579,7 @@ const WasmPageAllocator = struct {
     }
 
     fn resize(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         new_len: usize,
@@ -600,7 +600,7 @@ const WasmPageAllocator = struct {
     }
 
     fn free(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         return_address: usize,

lib/std/heap/arena_allocator.zig

Lines changed: 43 additions & 2 deletions
@@ -15,12 +15,50 @@ pub const ArenaAllocator = struct {
         buffer_list: std.SinglyLinkedList([]u8) = @as(std.SinglyLinkedList([]u8), .{}),
         end_index: usize = 0,
 
-        pub fn promote(self: State, child_allocator: Allocator) ArenaAllocator {
+        pub fn promote(self: *State, child_allocator: Allocator) Promoted {
             return .{
                 .child_allocator = child_allocator,
                 .state = self,
             };
         }
+
+        pub const Promoted = struct {
+            child_allocator: Allocator,
+            state: *State,
+
+            pub fn allocator(self: *const Promoted) Allocator {
+                return Allocator.init(self, Promoted.alloc, Promoted.resize, Promoted.free);
+            }
+
+            pub fn deinit(self: Promoted) void {
+                self.arena().deinit();
+            }
+
+            fn arena(self: Promoted) ArenaAllocator {
+                return .{
+                    .child_allocator = self.child_allocator,
+                    .state = self.state.*,
+                };
+            }
+
+            fn alloc(self: *const Promoted, n: usize, ptr_align: u29, len_align: u29, ra: usize) ![]u8 {
+                var ar = self.arena();
+                defer self.state.* = ar.state;
+                return ar.alloc(n, ptr_align, len_align, ra);
+            }
+
+            fn resize(self: *const Promoted, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ra: usize) ?usize {
+                var ar = self.arena();
+                defer self.state.* = ar.state;
+                return ar.resize(buf, buf_align, new_len, len_align, ra);
+            }
+
+            fn free(self: *const Promoted, buf: []u8, buf_align: u29, ra: usize) void {
+                var ar = self.arena();
+                defer self.state.* = ar.state;
+                ar.free(buf, buf_align, ra);
+            }
+        };
     };
 
     pub fn allocator(self: *ArenaAllocator) Allocator {
@@ -30,7 +68,10 @@ pub const ArenaAllocator = struct {
     const BufNode = std.SinglyLinkedList([]u8).Node;
 
     pub fn init(child_allocator: Allocator) ArenaAllocator {
-        return (State{}).promote(child_allocator);
+        return .{
+            .child_allocator = child_allocator,
+            .state = .{},
+        };
     }
 
     pub fn deinit(self: ArenaAllocator) void {
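Because each Promoted method rebuilds a temporary ArenaAllocator and then writes its state back through the stored *State pointer (the `defer self.state.* = ar.state` lines above), the caller's State stays current without any manual copy-back. A minimal test-style sketch of that behaviour against the post-commit std API; the Promoted is kept in a local here only for readability, the commit's shorthand `state.promote(gpa).allocator()` works as well:

const std = @import("std");

test "promoted arena keeps its State in sync" {
    var state: std.heap.ArenaAllocator.State = .{};
    const promoted = state.promote(std.testing.allocator);
    defer promoted.deinit();

    const a = promoted.allocator();
    _ = try a.alloc(u8, 16);
    // No copy-back needed: `state` already records the buffer allocated above,
    // because Promoted.alloc wrote the temporary arena's state back through
    // the *State pointer before returning.
}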

lib/std/mem.zig

Lines changed: 3 additions & 3 deletions
@@ -151,11 +151,11 @@ const fail_allocator = Allocator{
 
 const failAllocator_vtable = Allocator.VTable{
     .alloc = failAllocatorAlloc,
-    .resize = Allocator.NoResize(anyopaque).noResize,
-    .free = Allocator.NoOpFree(anyopaque).noOpFree,
+    .resize = Allocator.NoResize(null).noResize,
+    .free = Allocator.NoOpFree(null).noOpFree,
 };
 
-fn failAllocatorAlloc(_: *anyopaque, n: usize, alignment: u29, len_align: u29, ra: usize) Allocator.Error![]u8 {
+fn failAllocatorAlloc(_: Allocator.ImplPtr, n: usize, alignment: u29, len_align: u29, ra: usize) Allocator.Error![]u8 {
     _ = n;
     _ = alignment;
     _ = len_align;

lib/std/mem/Allocator.zig

Lines changed: 37 additions & 22 deletions
@@ -9,8 +9,26 @@ const builtin = @import("builtin");
 
 pub const Error = error{OutOfMemory};
 
-// The type erased pointer to the allocator implementation
-ptr: *anyopaque,
+// The type erased pointer to the allocator implementation. Stored in a union to
+// allow the pointer to be either const or not without using tricks like
+// @ptrToInt, which could hurt optimisation. We use a packed union to avoid safe
+// builds from storing a tag with the union, which would increase the size of an
+// Allocator and potentially hurt runtime performance.
+pub const ImplPtr = packed union {
+    c: *const anyopaque,
+    m: *anyopaque,
+
+    inline fn convert(self: ImplPtr, comptime Ptr: type) Ptr {
+        const alignment = std.meta.alignment(Ptr);
+
+        return if (comptime std.meta.trait.isConstPtr(Ptr))
+            @ptrCast(Ptr, @alignCast(alignment, self.c))
+        else
+            @ptrCast(Ptr, @alignCast(alignment, self.m));
+    }
+};
+
+ptr: ImplPtr,
 vtable: *const VTable,
 
 pub const VTable = struct {
@@ -23,7 +41,7 @@ pub const VTable = struct {
     ///
     /// `ret_addr` is optionally provided as the first return address of the allocation call stack.
     /// If the value is `0` it means no return address has been provided.
-    alloc: std.meta.FnPtr(fn (ptr: *anyopaque, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8),
+    alloc: std.meta.FnPtr(fn (ptr: ImplPtr, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8),
 
     /// Attempt to expand or shrink memory in place. `buf.len` must equal the most recent
     /// length returned by `alloc` or `resize`. `buf_align` must equal the same value
@@ -42,14 +60,14 @@ pub const VTable = struct {
     ///
     /// `ret_addr` is optionally provided as the first return address of the allocation call stack.
     /// If the value is `0` it means no return address has been provided.
-    resize: std.meta.FnPtr(fn (ptr: *anyopaque, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize),
+    resize: std.meta.FnPtr(fn (ptr: ImplPtr, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize),
 
     /// Free and invalidate a buffer. `buf.len` must equal the most recent length returned by `alloc` or `resize`.
     /// `buf_align` must equal the same value that was passed as the `ptr_align` parameter to the original `alloc` call.
     ///
     /// `ret_addr` is optionally provided as the first return address of the allocation call stack.
     /// If the value is `0` it means no return address has been provided.
-    free: std.meta.FnPtr(fn (ptr: *anyopaque, buf: []u8, buf_align: u29, ret_addr: usize) void),
+    free: std.meta.FnPtr(fn (ptr: ImplPtr, buf: []u8, buf_align: u29, ret_addr: usize) void),
 };
 
 pub fn init(
@@ -64,21 +82,18 @@ pub fn init(
     assert(ptr_info == .Pointer); // Must be a pointer
     assert(ptr_info.Pointer.size == .One); // Must be a single-item pointer
 
-    const alignment = ptr_info.Pointer.alignment;
+    const impl_ptr: ImplPtr = if (ptr_info.Pointer.is_const) .{ .c = pointer } else .{ .m = pointer };
 
     const gen = struct {
-        fn allocImpl(ptr: *anyopaque, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8 {
-            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
-            return @call(.{ .modifier = .always_inline }, allocFn, .{ self, len, ptr_align, len_align, ret_addr });
+        fn allocImpl(ptr: ImplPtr, len: usize, ptr_align: u29, len_align: u29, ret_addr: usize) Error![]u8 {
+            return @call(.{ .modifier = .always_inline }, allocFn, .{ ptr.convert(Ptr), len, ptr_align, len_align, ret_addr });
         }
-        fn resizeImpl(ptr: *anyopaque, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize {
+        fn resizeImpl(ptr: ImplPtr, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ret_addr: usize) ?usize {
             assert(new_len != 0);
-            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
-            return @call(.{ .modifier = .always_inline }, resizeFn, .{ self, buf, buf_align, new_len, len_align, ret_addr });
+            return @call(.{ .modifier = .always_inline }, resizeFn, .{ ptr.convert(Ptr), buf, buf_align, new_len, len_align, ret_addr });
         }
-        fn freeImpl(ptr: *anyopaque, buf: []u8, buf_align: u29, ret_addr: usize) void {
-            const self = @ptrCast(Ptr, @alignCast(alignment, ptr));
-            @call(.{ .modifier = .always_inline }, freeFn, .{ self, buf, buf_align, ret_addr });
+        fn freeImpl(ptr: ImplPtr, buf: []u8, buf_align: u29, ret_addr: usize) void {
+            @call(.{ .modifier = .always_inline }, freeFn, .{ ptr.convert(Ptr), buf, buf_align, ret_addr });
         }
 
         const vtable = VTable{
@@ -89,16 +104,16 @@ pub fn init(
     };
 
     return .{
-        .ptr = pointer,
+        .ptr = impl_ptr,
         .vtable = &gen.vtable,
     };
 }
 
 /// Set resizeFn to `NoResize(AllocatorType).noResize` if in-place resize is not supported.
-pub fn NoResize(comptime AllocatorType: type) type {
+pub fn NoResize(comptime AllocatorType: ?type) type {
     return struct {
         pub fn noResize(
-            self: *AllocatorType,
+            self: if (AllocatorType) |T| *T else ImplPtr,
            buf: []u8,
            buf_align: u29,
            new_len: usize,
@@ -115,10 +130,10 @@ pub fn NoResize(comptime AllocatorType: type) type {
 }
 
 /// Set freeFn to `NoOpFree(AllocatorType).noOpFree` if free is a no-op.
-pub fn NoOpFree(comptime AllocatorType: type) type {
+pub fn NoOpFree(comptime AllocatorType: ?type) type {
     return struct {
         pub fn noOpFree(
-            self: *AllocatorType,
+            self: if (AllocatorType) |T| *T else ImplPtr,
            buf: []u8,
            buf_align: u29,
            ret_addr: usize,
@@ -132,10 +147,10 @@ pub fn NoOpFree(comptime AllocatorType: type) type {
 }
 
 /// Set freeFn to `PanicFree(AllocatorType).panicFree` if free is not a supported operation.
-pub fn PanicFree(comptime AllocatorType: type) type {
+pub fn PanicFree(comptime AllocatorType: ?type) type {
    return struct {
        pub fn panicFree(
-            self: *AllocatorType,
+            self: if (AllocatorType) |T| *T else ImplPtr,
            buf: []u8,
            buf_align: u29,
            ret_addr: usize,
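In practice, the ImplPtr union means Allocator.init can now be handed a *const T implementation pointer: init selects the .c or .m field from ptr_info.Pointer.is_const, and convert casts it back to Ptr inside each vtable shim. A rough, self-contained sketch against the post-commit API; the FixedConfig type is invented purely for illustration and always fails its allocations:

const std = @import("std");
const Allocator = std.mem.Allocator;

const FixedConfig = struct {
    limit: usize,

    // Note the const self pointers: before this commit, Allocator.init could only
    // erase a mutable pointer because the stored field was a plain *anyopaque.
    fn alloc(self: *const FixedConfig, len: usize, ptr_align: u29, len_align: u29, ra: usize) Allocator.Error![]u8 {
        _ = ptr_align;
        _ = len_align;
        _ = ra;
        if (len > self.limit) return error.OutOfMemory;
        // A real implementation would hand out memory here; this sketch only shows
        // that a const implementation pointer round-trips through ImplPtr.
        return error.OutOfMemory;
    }

    fn resize(self: *const FixedConfig, buf: []u8, buf_align: u29, new_len: usize, len_align: u29, ra: usize) ?usize {
        _ = self;
        _ = buf;
        _ = buf_align;
        _ = new_len;
        _ = len_align;
        _ = ra;
        return null;
    }

    fn free(self: *const FixedConfig, buf: []u8, buf_align: u29, ra: usize) void {
        _ = self;
        _ = buf;
        _ = buf_align;
        _ = ra;
    }
};

test "Allocator.init accepts a const implementation pointer" {
    const config = FixedConfig{ .limit = 0 };
    const failing = Allocator.init(&config, FixedConfig.alloc, FixedConfig.resize, FixedConfig.free);
    try std.testing.expectError(error.OutOfMemory, failing.alloc(u8, 1));
}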

lib/std/os/uefi/pool_allocator.zig

Lines changed: 6 additions & 6 deletions
@@ -32,7 +32,7 @@ const UefiPoolAllocator = struct {
     }
 
     fn alloc(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         len: usize,
         ptr_align: u29,
         len_align: u29,
@@ -52,7 +52,7 @@ const UefiPoolAllocator = struct {
     }
 
     fn resize(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         new_len: usize,
@@ -66,7 +66,7 @@ const UefiPoolAllocator = struct {
     }
 
     fn free(
-        _: *anyopaque,
+        _: Allocator.ImplPtr,
         buf: []u8,
         buf_align: u29,
         ret_addr: usize,
@@ -103,7 +103,7 @@ const raw_pool_allocator_table = Allocator.VTable{
 };
 
 fn uefi_alloc(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     len: usize,
     ptr_align: u29,
     len_align: u29,
@@ -124,7 +124,7 @@ fn uefi_alloc(
 }
 
 fn uefi_resize(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     buf: []u8,
     old_align: u29,
     new_len: usize,
@@ -142,7 +142,7 @@ fn uefi_resize(
 }
 
 fn uefi_free(
-    _: *anyopaque,
+    _: Allocator.ImplPtr,
     buf: []u8,
     buf_align: u29,
     ret_addr: usize,

src/Module.zig

Lines changed: 1 addition & 3 deletions
@@ -5519,9 +5519,7 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
     const decl = mod.declPtr(decl_index);
 
     // Use the Decl's arena for captured values.
-    var decl_arena = decl.value_arena.?.promote(gpa);
-    defer decl.value_arena.?.* = decl_arena.state;
-    const decl_arena_allocator = decl_arena.allocator();
+    const decl_arena_allocator = decl.value_arena.?.promote(gpa).allocator();
 
     var sema: Sema = .{
         .mod = mod,
