@@ -359,12 +359,9 @@ pub const Tokenizer = struct {
     };
 };
 
-// stage1 compiler support
-var stage2_da = std.heap.DirectAllocator.init();
-
 export fn stage2_DepTokenizer_init(input: [*]const u8, len: usize) stage2_DepTokenizer {
-    const t = stage2_da.allocator.create(Tokenizer) catch unreachable;
-    t.* = Tokenizer.init(&stage2_da.allocator, input[0..len]);
+    const t = std.heap.c_allocator.create(Tokenizer) catch @panic("failed to create .d tokenizer");
+    t.* = Tokenizer.init(std.heap.c_allocator, input[0..len]);
     return stage2_DepTokenizer{
         .handle = t,
     };
@@ -376,20 +373,25 @@ export fn stage2_DepTokenizer_deinit(self: *stage2_DepTokenizer) void {
 
 export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextResult {
     const otoken = self.handle.next() catch {
+        const textz = std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch @panic("failed to create .d tokenizer error text");
         return stage2_DepNextResult{
-            .ent = 0,
-            .textz = (std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch unreachable).toSlice().ptr,
+            .type_id = .error_,
+            .textz = textz.toSlice().ptr,
         };
     };
     const token = otoken orelse {
         return stage2_DepNextResult{
-            .ent = 1,
+            .type_id = .null_,
             .textz = undefined,
         };
     };
+    const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
     return stage2_DepNextResult{
-        .ent = @enumToInt(token.id) + u8(2),
-        .textz = (std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch unreachable).toSlice().ptr,
+        .type_id = switch (token.id) {
+            .target => stage2_DepNextResult.TypeId.target,
+            .prereq => stage2_DepNextResult.TypeId.prereq,
+        },
+        .textz = textz.toSlice().ptr,
     };
 }
 
@@ -398,13 +400,20 @@ export const stage2_DepTokenizer = extern struct {
 };
 
 export const stage2_DepNextResult = extern struct {
-    // 0=error, 1=null, 2=token=target, 3=token=prereq
-    ent: u8,
-    // ent=0 -- error text
-    // ent=1 -- NEVER
-    // ent=2 -- token text value
-    // ent=3 -- token text value
+    type_id: TypeId,
+
+    // when type_id == error --> error text
+    // when type_id == null --> undefined
+    // when type_id == target --> target pathname
+    // when type_id == prereq --> prereq pathname
     textz: [*]const u8,
+
+    export const TypeId = extern enum {
+        error_,
+        null_,
+        target,
+        prereq,
+    };
 };
 
 test "empty file" {