@@ -3,6 +3,7 @@ const builtin = @import("builtin");
 const mem = std.mem;
 const math = std.math;
 const assert = std.debug.assert;
+const codegen = @import("../../codegen.zig");
 const Air = @import("../../Air.zig");
 const Mir = @import("Mir.zig");
 const Emit = @import("Emit.zig");
@@ -22,12 +23,14 @@ const leb128 = std.leb;
 const log = std.log.scoped(.codegen);
 const build_options = @import("build_options");
 
-const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
-const FnResult = @import("../../codegen.zig").FnResult;
-const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
+const GenerateSymbolError = codegen.GenerateSymbolError;
+const FnResult = codegen.FnResult;
+const DebugInfoOutput = codegen.DebugInfoOutput;
 
 const bits = @import("bits.zig");
 const abi = @import("abi.zig");
+const errUnionPayloadOffset = codegen.errUnionPayloadOffset;
+const errUnionErrorOffset = codegen.errUnionErrorOffset;
 const RegisterManager = abi.RegisterManager;
 const RegisterLock = RegisterManager.RegisterLock;
 const Register = bits.Register;
@@ -3272,7 +3275,14 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
 
 fn ret(self: *Self, mcv: MCValue) !void {
     const ret_ty = self.fn_type.fnReturnType();
-    try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
+    switch (self.ret_mcv) {
+        .immediate => {
+            assert(ret_ty.isError());
+        },
+        else => {
+            try self.setRegOrMem(ret_ty, self.ret_mcv, mcv);
+        },
+    }
     // Just add space for an instruction, patch this later
     const index = try self.addInst(.{
         .tag = .nop,
@@ -3601,30 +3611,39 @@ fn isErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
     const error_type = ty.errorUnionSet();
     const payload_type = ty.errorUnionPayload();
 
-    if (!error_type.hasRuntimeBits()) {
+    if (error_type.errorSetCardinality() == .zero) {
         return MCValue{ .immediate = 0 }; // always false
-    } else if (!payload_type.hasRuntimeBits()) {
-        if (error_type.abiSize(self.target.*) <= 8) {
-            const reg_mcv: MCValue = switch (operand) {
-                .register => operand,
-                else => .{ .register = try self.copyToTmpRegister(error_type, operand) },
-            };
+    }
 
+    const err_off = errUnionErrorOffset(payload_type, self.target.*);
+    switch (operand) {
+        .stack_offset => |off| {
+            const offset = off - @intCast(u32, err_off);
+            const tmp_reg = try self.copyToTmpRegister(Type.anyerror, .{ .stack_offset = offset });
             _ = try self.addInst(.{
                 .tag = .cmp_immediate,
                 .data = .{ .r_imm12_sh = .{
-                    .rn = reg_mcv.register,
+                    .rn = tmp_reg,
                     .imm12 = 0,
                 } },
             });
-
-            return MCValue{ .compare_flags_unsigned = .gt };
-        } else {
-            return self.fail("TODO isErr for errors with size > 8", .{});
-        }
-    } else {
-        return self.fail("TODO isErr for non-empty payloads", .{});
+        },
+        .register => |reg| {
+            if (err_off > 0 or payload_type.hasRuntimeBitsIgnoreComptime()) {
+                return self.fail("TODO implement isErr for register operand with payload bits", .{});
+            }
+            _ = try self.addInst(.{
+                .tag = .cmp_immediate,
+                .data = .{ .r_imm12_sh = .{
+                    .rn = reg,
+                    .imm12 = 0,
+                } },
+            });
+        },
+        else => return self.fail("TODO implement isErr for {}", .{operand}),
     }
+
+    return MCValue{ .compare_flags_unsigned = .gt };
 }
 
 fn isNonErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
@@ -4483,15 +4502,15 @@ fn resolveInst(self: *Self, inst: Air.Inst.Ref) InnerError!MCValue {
     const ref_int = @enumToInt(inst);
     if (ref_int < Air.Inst.Ref.typed_value_map.len) {
         const tv = Air.Inst.Ref.typed_value_map[ref_int];
-        if (!tv.ty.hasRuntimeBits()) {
+        if (!tv.ty.hasRuntimeBitsIgnoreComptime() and !tv.ty.isError()) {
             return MCValue{ .none = {} };
         }
         return self.genTypedValue(tv);
     }
 
     // If the type has no codegen bits, no need to store it.
     const inst_ty = self.air.typeOf(inst);
-    if (!inst_ty.hasRuntimeBits())
+    if (!inst_ty.hasRuntimeBitsIgnoreComptime() and !inst_ty.isError())
         return MCValue{ .none = {} };
 
     const inst_index = @intCast(Air.Inst.Index, ref_int - Air.Inst.Ref.typed_value_map.len);
@@ -4674,32 +4693,38 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
             }
         },
         .ErrorSet => {
-            const err_name = typed_value.val.castTag(.@"error").?.data.name;
-            const module = self.bin_file.options.module.?;
-            const global_error_set = module.global_error_set;
-            const error_index = global_error_set.get(err_name).?;
-            return MCValue{ .immediate = error_index };
+            switch (typed_value.val.tag()) {
+                .@"error" => {
+                    const err_name = typed_value.val.castTag(.@"error").?.data.name;
+                    const module = self.bin_file.options.module.?;
+                    const global_error_set = module.global_error_set;
+                    const error_index = global_error_set.get(err_name).?;
+                    return MCValue{ .immediate = error_index };
+                },
+                else => {
+                    // In this case we are rendering an error union which has a 0 bits payload.
+                    return MCValue{ .immediate = 0 };
+                },
+            }
         },
         .ErrorUnion => {
             const error_type = typed_value.ty.errorUnionSet();
             const payload_type = typed_value.ty.errorUnionPayload();
 
-            if (typed_value.val.castTag(.eu_payload)) |pl| {
-                if (!payload_type.hasRuntimeBits()) {
-                    // We use the error type directly as the type.
-                    return MCValue{ .immediate = 0 };
-                }
+            if (error_type.errorSetCardinality() == .zero) {
+                const payload_val = typed_value.val.castTag(.eu_payload).?.data;
+                return self.genTypedValue(.{ .ty = payload_type, .val = payload_val });
+            }
 
-                _ = pl;
-                return self.fail("TODO implement error union const of type '{}' (non-error)", .{typed_value.ty.fmtDebug()});
-            } else {
-                if (!payload_type.hasRuntimeBits()) {
-                    // We use the error type directly as the type.
-                    return self.genTypedValue(.{ .ty = error_type, .val = typed_value.val });
-                }
+            const is_pl = typed_value.val.errorUnionIsPayload();
 
-                return self.fail("TODO implement error union const of type '{}' (error)", .{typed_value.ty.fmtDebug()});
+            if (!payload_type.hasRuntimeBitsIgnoreComptime()) {
+                // We use the error type directly as the type.
+                const err_val = if (!is_pl) typed_value.val else Value.initTag(.zero);
+                return self.genTypedValue(.{ .ty = error_type, .val = err_val });
             }
+
+            return self.lowerUnnamedConst(typed_value);
         },
         .Struct => {
             return self.lowerUnnamedConst(typed_value);
@@ -4796,13 +4821,16 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) !CallMCValues {
 
     if (ret_ty.zigTypeTag() == .NoReturn) {
         result.return_value = .{ .unreach = {} };
-    } else if (!ret_ty.hasRuntimeBits()) {
+    } else if (!ret_ty.hasRuntimeBitsIgnoreComptime() and !ret_ty.isError()) {
         result.return_value = .{ .none = {} };
     } else switch (cc) {
         .Naked => unreachable,
         .Unspecified, .C => {
             const ret_ty_size = @intCast(u32, ret_ty.abiSize(self.target.*));
-            if (ret_ty_size <= 8) {
+            if (ret_ty_size == 0) {
+                assert(ret_ty.isError());
+                result.return_value = .{ .immediate = 0 };
+            } else if (ret_ty_size <= 8) {
                 result.return_value = .{ .register = registerAlias(c_abi_int_return_regs[0], ret_ty_size) };
             } else {
                 return self.fail("TODO support more return types for ARM backend", .{});