@@ -152,6 +152,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
         0,
         tree.containerDeclRoot(),
         .Auto,
+        0,
     )) |struct_decl_ref| {
         assert(refToIndex(struct_decl_ref).? == 0);
     } else |err| switch (err) {
@@ -4224,15 +4225,18 @@ fn structDeclInner(
     node: Ast.Node.Index,
     container_decl: Ast.full.ContainerDecl,
     layout: std.builtin.Type.ContainerLayout,
+    backing_int_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const decl_inst = try gz.reserveInstructionIndex();

-    if (container_decl.ast.members.len == 0) {
+    if (container_decl.ast.members.len == 0 and backing_int_node == 0) {
         try gz.setStruct(decl_inst, .{
             .src_node = node,
             .layout = layout,
             .fields_len = 0,
             .decls_len = 0,
+            .backing_int_ref = .none,
+            .backing_int_body_len = 0,
             .known_non_opv = false,
             .known_comptime_only = false,
         });
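Note the added `backing_int_node == 0` condition: an empty container is only
eligible for the trivial encoding when no backing integer expression is
present. A source-level illustration (the declaration names are invented for
the example):

    // Still hits the fast path above: no members, no backing integer.
    const Empty = struct {};

    // Must take the full path even though it has no members, so that the
    // backing integer expression is evaluated -- and, for a non-packed
    // layout, rejected with the failNode error shown in the next hunk.
    const Bad = extern struct(u8) {};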
@@ -4267,6 +4271,34 @@ fn structDeclInner(
     };
     defer block_scope.unstack();

+    const scratch_top = astgen.scratch.items.len;
+    var backing_int_body_inst: []Zir.Inst.Index = &[0]Zir.Inst.Index{};
+    defer astgen.scratch.items.len = scratch_top;
+    const backing_int_ref: Zir.Inst.Ref = blk: {
+        if (backing_int_node != 0) {
+            if (layout != .Packed) {
+                return astgen.failNode(backing_int_node, "non-packed struct does not support backing integer type", .{});
+            } else {
+                const backing_int_ref = try typeExpr(&block_scope, &namespace.base, backing_int_node);
+                if (!block_scope.isEmpty()) {
+                    if (!block_scope.endsWithNoReturn()) {
+                        _ = try block_scope.addBreak(.break_inline, decl_inst, backing_int_ref);
+                    }
+
+                    const body = block_scope.instructionsSlice();
+                    const old_scratch_len = astgen.scratch.items.len;
+                    try astgen.scratch.ensureUnusedCapacity(gpa, countBodyLenAfterFixups(astgen, body));
+                    appendBodyWithFixupsArrayList(astgen, &astgen.scratch, body);
+                    backing_int_body_inst = astgen.scratch.items[old_scratch_len..];
+                    block_scope.instructions.items.len = block_scope.instructions_top;
+                }
+                break :blk backing_int_ref;
+            }
+        } else {
+            break :blk .none;
+        }
+    };
+
     const decl_count = try astgen.scanDecls(&namespace, container_decl.ast.members);
     const field_count = @intCast(u32, container_decl.ast.members.len - decl_count);

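At the language level, this hunk is what evaluates the expression between the
parentheses of `packed struct(...)` as an inline type body, and what rejects
it for every other layout. A minimal sketch of both cases (the type names are
invented for the example):

    // Accepted: `u8` is evaluated by typeExpr in block_scope; a simple
    // identifier resolves to a well-known ref without emitting body
    // instructions, so backing_int_body_inst stays empty.
    const Flags = packed struct(u8) {
        a: bool,
        b: bool,
        _: u6,
    };

    // Rejected by the failNode branch above:
    // "non-packed struct does not support backing integer type"
    const Bad = extern struct(u8) { x: u8 };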
@@ -4379,6 +4411,8 @@ fn structDeclInner(
         .layout = layout,
         .fields_len = field_count,
         .decls_len = decl_count,
+        .backing_int_ref = backing_int_ref,
+        .backing_int_body_len = @intCast(u32, backing_int_body_inst.len),
         .known_non_opv = known_non_opv,
         .known_comptime_only = known_comptime_only,
     });
@@ -4387,7 +4421,9 @@ fn structDeclInner(
     const decls_slice = wip_members.declsSlice();
     const fields_slice = wip_members.fieldsSlice();
     const bodies_slice = astgen.scratch.items[bodies_start..];
-    try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + fields_slice.len + bodies_slice.len);
+    try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_inst.len +
+        decls_slice.len + fields_slice.len + bodies_slice.len);
+    astgen.extra.appendSliceAssumeCapacity(backing_int_body_inst);
    astgen.extra.appendSliceAssumeCapacity(decls_slice);
    astgen.extra.appendSliceAssumeCapacity(fields_slice);
    astgen.extra.appendSliceAssumeCapacity(bodies_slice);
@@ -4583,9 +4619,7 @@ fn containerDecl(
                 else => unreachable,
             } else std.builtin.Type.ContainerLayout.Auto;

-            assert(container_decl.ast.arg == 0);
-
-            const result = try structDeclInner(gz, scope, node, container_decl, layout);
+            const result = try structDeclInner(gz, scope, node, container_decl, layout, container_decl.ast.arg);
             return rvalue(gz, rl, result, node);
         },
         .keyword_union => {
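For reference, `container_decl.ast.arg` is the AST node of the parenthesized
expression after the `struct` keyword, or 0 when it is absent -- which is why
the old assert could simply demand 0 before this feature existed. An
illustrative mapping (the declaration is invented for the example):

    const S = packed struct(u32) { x: u32 };
    //                      ^~~
    //                      container_decl.ast.arg refers to this expression;
    //                      it is 0 for a plain `struct { ... }`.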
@@ -11234,14 +11268,16 @@ const GenZir = struct {
         src_node: Ast.Node.Index,
         fields_len: u32,
         decls_len: u32,
+        backing_int_ref: Zir.Inst.Ref,
+        backing_int_body_len: u32,
         layout: std.builtin.Type.ContainerLayout,
         known_non_opv: bool,
         known_comptime_only: bool,
     }) !void {
         const astgen = gz.astgen;
         const gpa = astgen.gpa;

-        try astgen.extra.ensureUnusedCapacity(gpa, 4);
+        try astgen.extra.ensureUnusedCapacity(gpa, 6);
         const payload_index = @intCast(u32, astgen.extra.items.len);

         if (args.src_node != 0) {
@@ -11254,6 +11290,12 @@ const GenZir = struct {
         if (args.decls_len != 0) {
             astgen.extra.appendAssumeCapacity(args.decls_len);
         }
+        if (args.backing_int_ref != .none) {
+            astgen.extra.appendAssumeCapacity(args.backing_int_body_len);
+            if (args.backing_int_body_len == 0) {
+                astgen.extra.appendAssumeCapacity(@enumToInt(args.backing_int_ref));
+            }
+        }
         astgen.instructions.set(inst, .{
             .tag = .extended,
             .data = .{ .extended = .{
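The backing integer thus joins the other optional trailing fields of the
struct_decl payload: one word for the body length and, only when the body is
empty, one word holding the type ref inline; a non-empty body is appended by
structDeclInner immediately after this payload. A minimal decoder sketch under
those assumptions (the names here are invented, not the compiler's actual
API):

    const BackingIntInfo = union(enum) {
        ref: u32,
        body: []const u32,
    };

    /// Reads the backing-integer trailing data at `index.*`, advancing the
    /// cursor past it; returns null when the has_backing_int flag was clear.
    fn readBackingInt(extra: []const u32, index: *usize, has_backing_int: bool) ?BackingIntInfo {
        if (!has_backing_int) return null;
        const len = extra[index.*];
        index.* += 1;
        if (len == 0) {
            // Empty body: the ref itself is stored inline.
            const ref = extra[index.*];
            index.* += 1;
            return BackingIntInfo{ .ref = ref };
        }
        // Otherwise `len` body instruction indices follow.
        const body = extra[index.* .. index.* + len];
        index.* += len;
        return BackingIntInfo{ .body = body };
    }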
@@ -11262,6 +11304,7 @@ const GenZir = struct {
                 .has_src_node = args.src_node != 0,
                 .has_fields_len = args.fields_len != 0,
                 .has_decls_len = args.decls_len != 0,
+                .has_backing_int = args.backing_int_ref != .none,
                 .known_non_opv = args.known_non_opv,
                 .known_comptime_only = args.known_comptime_only,
                 .name_strategy = gz.anon_name_strategy,
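Taken together, these hunks wire the `packed struct(T)` syntax from the parser
through ZIR. A small behavioral sketch of the resulting language feature (the
test is illustrative; later compilation stages, not this AstGen change,
enforce that the fields exactly fill the backing integer):

    const std = @import("std");

    test "packed struct with explicit backing integer" {
        const S = packed struct(u29) {
            x: u10,
            y: u10,
            z: u9,
        };
        // The bit size of a packed struct is that of its backing integer.
        try std.testing.expectEqual(@bitSizeOf(u29), @bitSizeOf(S));
    }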