@@ -103,7 +103,7 @@ const CAllocator = struct {
     }
 
     fn alloc(
-        _: *anyopaque,
+        _: ?*anyopaque,
         len: usize,
         log2_align: u8,
         return_address: usize,
@@ -114,7 +114,7 @@ const CAllocator = struct {
     }
 
     fn resize(
-        _: *anyopaque,
+        _: ?*anyopaque,
        buf: []u8,
         log2_buf_align: u8,
         new_len: usize,
@@ -135,7 +135,7 @@ const CAllocator = struct {
     }
 
     fn free(
-        _: *anyopaque,
+        _: ?*anyopaque,
         buf: []u8,
         log2_buf_align: u8,
         return_address: usize,
@@ -150,7 +150,7 @@ const CAllocator = struct {
 /// `malloc_usable_size` if available. For an allocator that directly calls
 /// `malloc`/`free`, see `raw_c_allocator`.
 pub const c_allocator = Allocator{
-    .ptr = undefined,
+    .ptr = null,
     .vtable = &c_allocator_vtable,
 };
 const c_allocator_vtable = Allocator.VTable{
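As the doc comment above notes, `c_allocator` may use `malloc_usable_size`, while `raw_c_allocator` calls `malloc`/`free` directly. Callers are unaffected by the `.ptr = undefined` to `.ptr = null` change, since they only go through the public `Allocator` interface and never touch `.ptr`. A minimal sketch, assuming libc is linked (e.g. `zig build-exe main.zig -lc`):

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.c_allocator;
    const buf = try gpa.alloc(u8, 64); // forwarded to malloc via the vtable
    defer gpa.free(buf); // forwarded to free
    @memset(buf, 0);
}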
@@ -165,7 +165,7 @@ const c_allocator_vtable = Allocator.VTable{
 /// `ArenaAllocator` for example and is more optimal in such a case
 /// than `c_allocator`.
 pub const raw_c_allocator = Allocator{
-    .ptr = undefined,
+    .ptr = null,
     .vtable = &raw_c_allocator_vtable,
 };
 const raw_c_allocator_vtable = Allocator.VTable{
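A sketch of the pattern that doc comment describes: `raw_c_allocator` as the child allocator of an `ArenaAllocator`, where the arena batches frees and the extra bookkeeping of `c_allocator` is unnecessary. Again assumes libc is linked.

const std = @import("std");

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.raw_c_allocator);
    defer arena.deinit(); // frees everything allocated from the arena below
    const a = arena.allocator();

    const nums = try a.alloc(u32, 1000);
    _ = nums;
}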
@@ -175,7 +175,7 @@ const raw_c_allocator_vtable = Allocator.VTable{
 };
 
 fn rawCAlloc(
-    _: *anyopaque,
+    _: ?*anyopaque,
     len: usize,
     log2_ptr_align: u8,
     ret_addr: usize,
@@ -192,7 +192,7 @@ fn rawCAlloc(
 }
 
 fn rawCResize(
-    _: *anyopaque,
+    _: ?*anyopaque,
     buf: []u8,
     log2_old_align: u8,
     new_len: usize,
@@ -213,7 +213,7 @@ fn rawCResize(
 }
 
 fn rawCFree(
-    _: *anyopaque,
+    _: ?*anyopaque,
     buf: []u8,
     log2_old_align: u8,
     ret_addr: usize,
@@ -231,17 +231,17 @@ pub const page_allocator = if (@hasDecl(root, "os") and
     root.os.heap.page_allocator
 else if (builtin.target.isWasm())
     Allocator{
-        .ptr = undefined,
+        .ptr = null,
         .vtable = &WasmPageAllocator.vtable,
     }
 else if (builtin.target.os.tag == .plan9)
     Allocator{
-        .ptr = undefined,
+        .ptr = null,
         .vtable = &SbrkAllocator(std.os.plan9.sbrk).vtable,
     }
 else
     Allocator{
-        .ptr = undefined,
+        .ptr = null,
         .vtable = &PageAllocator.vtable,
     };
 
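The hunk above selects the `page_allocator` backend at compile time (a root-provided override, the wasm implementation, plan9's sbrk, or the generic `PageAllocator`). From user code the selection is transparent; a hedged usage sketch:

const std = @import("std");

pub fn main() !void {
    // page_allocator hands out page-granular memory from the OS (or wasm memory growth),
    // so it suits large, long-lived allocations.
    const page = try std.heap.page_allocator.alloc(u8, 4096);
    defer std.heap.page_allocator.free(page);
}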
@@ -251,7 +251,7 @@
 /// and wasm64 architectures.
 /// Until then, it is available here to play with.
 pub const wasm_allocator = Allocator{
-    .ptr = undefined,
+    .ptr = null,
     .vtable = &std.heap.WasmAllocator.vtable,
 };
 
@@ -296,13 +296,13 @@ pub const HeapAllocator = switch (builtin.os.tag) {
         }
 
         fn alloc(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             n: usize,
             log2_ptr_align: u8,
             return_address: usize,
         ) ?[*]u8 {
             _ = return_address;
-            const self: *HeapAllocator = @ptrCast(@alignCast(ctx));
+            const self: *HeapAllocator = @ptrCast(@alignCast(ctx.?));
 
             const ptr_align = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_ptr_align));
             const amt = n + ptr_align - 1 + @sizeOf(usize);
@@ -323,15 +323,15 @@ pub const HeapAllocator = switch (builtin.os.tag) {
         }
 
         fn resize(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             buf: []u8,
             log2_buf_align: u8,
             new_size: usize,
             return_address: usize,
         ) bool {
             _ = log2_buf_align;
             _ = return_address;
-            const self: *HeapAllocator = @ptrCast(@alignCast(ctx));
+            const self: *HeapAllocator = @ptrCast(@alignCast(ctx.?));
 
             const root_addr = getRecordPtr(buf).*;
             const align_offset = @intFromPtr(buf.ptr) - root_addr;
@@ -348,14 +348,14 @@ pub const HeapAllocator = switch (builtin.os.tag) {
         }
 
         fn free(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             buf: []u8,
             log2_buf_align: u8,
             return_address: usize,
         ) void {
             _ = log2_buf_align;
             _ = return_address;
-            const self: *HeapAllocator = @ptrCast(@alignCast(ctx));
+            const self: *HeapAllocator = @ptrCast(@alignCast(ctx.?));
             windows.HeapFree(self.heap_handle.?, 0, @as(*anyopaque, @ptrFromInt(getRecordPtr(buf).*)));
         }
     },
@@ -423,8 +423,8 @@ pub const FixedBufferAllocator = struct {
         return buf.ptr + buf.len == self.buffer.ptr + self.end_index;
     }
 
-    fn alloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
-        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
+    fn alloc(ctx: ?*anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
+        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx.?));
         _ = ra;
         const ptr_align = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_ptr_align));
         const adjust_off = mem.alignPointerOffset(self.buffer.ptr + self.end_index, ptr_align) orelse return null;
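Across these hunks the change follows one pattern: a stateful allocator now receives `ctx: ?*anyopaque` and unwraps it with `ctx.?` before casting back to its concrete type, while stateless allocators (the C allocators above) pass `null` instead of `undefined`. A minimal sketch of a custom allocator handler written against the modified vtable signature this patch introduces; the `?*anyopaque` parameter type is the patched one, not upstream std, and `CountingAllocator` is a hypothetical example type.

const std = @import("std");
const Allocator = std.mem.Allocator;

const CountingAllocator = struct {
    backing: Allocator,
    count: usize = 0,

    fn alloc(ctx: ?*anyopaque, len: usize, log2_align: u8, ra: usize) ?[*]u8 {
        // Stateful allocator: unwrap the optional context, then cast back to the concrete type.
        const self: *CountingAllocator = @ptrCast(@alignCast(ctx.?));
        self.count += 1;
        return self.backing.rawAlloc(len, log2_align, ra);
    }
    // resize/free elided; they would unwrap ctx.? the same way.
};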
@@ -436,13 +436,13 @@ pub const FixedBufferAllocator = struct {
     }
 
     fn resize(
-        ctx: *anyopaque,
+        ctx: ?*anyopaque,
         buf: []u8,
         log2_buf_align: u8,
         new_size: usize,
         return_address: usize,
     ) bool {
-        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
+        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx.?));
         _ = log2_buf_align;
         _ = return_address;
         assert(@inComptime() or self.ownsSlice(buf));
@@ -466,12 +466,12 @@ pub const FixedBufferAllocator = struct {
     }
 
     fn free(
-        ctx: *anyopaque,
+        ctx: ?*anyopaque,
         buf: []u8,
         log2_buf_align: u8,
         return_address: usize,
     ) void {
-        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
+        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx.?));
         _ = log2_buf_align;
         _ = return_address;
         assert(@inComptime() or self.ownsSlice(buf));
@@ -481,8 +481,8 @@ pub const FixedBufferAllocator = struct {
         }
     }
 
-    fn threadSafeAlloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
-        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
+    fn threadSafeAlloc(ctx: ?*anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
+        const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx.?));
         _ = ra;
         const ptr_align = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_ptr_align));
         var end_index = @atomicLoad(usize, &self.end_index, .seq_cst);
@@ -551,24 +551,24 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
         pub const allocator = @compileError("use 'const allocator = stackFallback(N).get();' instead");
 
         fn alloc(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             len: usize,
             log2_ptr_align: u8,
             ra: usize,
         ) ?[*]u8 {
-            const self: *Self = @ptrCast(@alignCast(ctx));
+            const self: *Self = @ptrCast(@alignCast(ctx.?));
             return FixedBufferAllocator.alloc(&self.fixed_buffer_allocator, len, log2_ptr_align, ra) orelse
                 return self.fallback_allocator.rawAlloc(len, log2_ptr_align, ra);
         }
 
         fn resize(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             buf: []u8,
             log2_buf_align: u8,
             new_len: usize,
             ra: usize,
         ) bool {
-            const self: *Self = @ptrCast(@alignCast(ctx));
+            const self: *Self = @ptrCast(@alignCast(ctx.?));
             if (self.fixed_buffer_allocator.ownsPtr(buf.ptr)) {
                 return FixedBufferAllocator.resize(&self.fixed_buffer_allocator, buf, log2_buf_align, new_len, ra);
             } else {
@@ -577,12 +577,12 @@ pub fn StackFallbackAllocator(comptime size: usize) type {
         }
 
         fn free(
-            ctx: *anyopaque,
+            ctx: ?*anyopaque,
             buf: []u8,
             log2_buf_align: u8,
             ra: usize,
         ) void {
-            const self: *Self = @ptrCast(@alignCast(ctx));
+            const self: *Self = @ptrCast(@alignCast(ctx.?));
             if (self.fixed_buffer_allocator.ownsPtr(buf.ptr)) {
                 return FixedBufferAllocator.free(&self.fixed_buffer_allocator, buf, log2_buf_align, ra);
             } else {
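For completeness, a hedged usage sketch of the stack-fallback pattern these last hunks touch: `stackFallback` serves allocations from a fixed on-stack buffer first and falls back to another allocator when that buffer runs out. Callers never see the `?*anyopaque` plumbing changed by this patch.

const std = @import("std");

pub fn main() !void {
    // 256 bytes on the stack; spills to page_allocator when exhausted.
    var sfa = std.heap.stackFallback(256, std.heap.page_allocator);
    const a = sfa.get();

    const small = try a.alloc(u8, 64); // likely served from the stack buffer
    defer a.free(small);
    const big = try a.alloc(u8, 4096); // falls back to page_allocator
    defer a.free(big);
}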