@@ -10463,13 +10463,6 @@ static IrInstruction *ir_implicit_cast(IrAnalyze *ira, IrInstruction *value, Typ
     zig_unreachable();
 }
 
-static IrInstruction *ir_implicit_byval_const_ref_cast(IrAnalyze *ira, IrInstruction *inst) {
-    if (type_is_copyable(ira->codegen, inst->value.type))
-        return inst;
-    TypeTableEntry *const_ref_type = get_pointer_to_type(ira->codegen, inst->value.type, true);
-    return ir_implicit_cast(ira, inst, const_ref_type);
-}
-
 static IrInstruction *ir_get_deref(IrAnalyze *ira, IrInstruction *source_instruction, IrInstruction *ptr) {
     TypeTableEntry *type_entry = ptr->value.type;
     if (type_is_invalid(type_entry)) {
@@ -12283,7 +12276,7 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
     IrInstruction *casted_arg;
     if (is_var_args) {
         arg_part_of_generic_id = true;
-        casted_arg = ir_implicit_byval_const_ref_cast(ira, arg);
+        casted_arg = arg;
     } else {
         if (param_decl_node->data.param_decl.var_token == nullptr) {
             AstNode *param_type_node = param_decl_node->data.param_decl.type;
@@ -12296,7 +12289,7 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
             return false;
         } else {
             arg_part_of_generic_id = true;
-            casted_arg = ir_implicit_byval_const_ref_cast(ira, arg);
+            casted_arg = arg;
         }
     }
 
@@ -12515,9 +12508,18 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
 
     size_t next_proto_i = 0;
     if (first_arg_ptr) {
-        IrInstruction *first_arg;
         assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
-        if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
+        bool first_arg_known_bare = false;
+        if (fn_type_id->next_param_index >= 1) {
+            TypeTableEntry *param_type = fn_type_id->param_info[next_proto_i].type;
+            if (type_is_invalid(param_type))
+                return ira->codegen->builtin_types.entry_invalid;
+            first_arg_known_bare = param_type->id != TypeTableEntryIdPointer;
+        }
+
+        IrInstruction *first_arg;
+        if (!first_arg_known_bare && handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
             first_arg = first_arg_ptr;
         } else {
             first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
@@ -12667,9 +12669,18 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
     size_t next_proto_i = 0;
 
     if (first_arg_ptr) {
-        IrInstruction *first_arg;
         assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
-        if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
+        bool first_arg_known_bare = false;
+        if (fn_type_id->next_param_index >= 1) {
+            TypeTableEntry *param_type = fn_type_id->param_info[next_proto_i].type;
+            if (type_is_invalid(param_type))
+                return ira->codegen->builtin_types.entry_invalid;
+            first_arg_known_bare = param_type->id != TypeTableEntryIdPointer;
+        }
+
+        IrInstruction *first_arg;
+        if (!first_arg_known_bare && handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
             first_arg = first_arg_ptr;
         } else {
             first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
@@ -12802,10 +12813,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
             return ira->codegen->builtin_types.entry_invalid;
         }
         if (inst_fn_type_id.async_allocator_type == nullptr) {
-            IrInstruction *casted_inst = ir_implicit_byval_const_ref_cast(ira, uncasted_async_allocator_inst);
-            if (type_is_invalid(casted_inst->value.type))
-                return ira->codegen->builtin_types.entry_invalid;
-            inst_fn_type_id.async_allocator_type = casted_inst->value.type;
+            inst_fn_type_id.async_allocator_type = uncasted_async_allocator_inst->value.type;
         }
         async_allocator_inst = ir_implicit_cast(ira, uncasted_async_allocator_inst, inst_fn_type_id.async_allocator_type);
         if (type_is_invalid(async_allocator_inst->value.type))
@@ -12866,20 +12874,23 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
     IrInstruction **casted_args = allocate<IrInstruction *>(call_param_count);
     size_t next_arg_index = 0;
     if (first_arg_ptr) {
-        IrInstruction *first_arg;
         assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
-        if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
+        TypeTableEntry *param_type = fn_type_id->param_info[next_arg_index].type;
+        if (type_is_invalid(param_type))
+            return ira->codegen->builtin_types.entry_invalid;
+
+        IrInstruction *first_arg;
+        if (param_type->id == TypeTableEntryIdPointer &&
+            handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type))
+        {
             first_arg = first_arg_ptr;
         } else {
             first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
             if (type_is_invalid(first_arg->value.type))
                 return ira->codegen->builtin_types.entry_invalid;
         }
 
-        TypeTableEntry *param_type = fn_type_id->param_info[next_arg_index].type;
-        if (type_is_invalid(param_type))
-            return ira->codegen->builtin_types.entry_invalid;
-
         IrInstruction *casted_arg = ir_implicit_cast(ira, first_arg, param_type);
         if (type_is_invalid(casted_arg->value.type))
             return ira->codegen->builtin_types.entry_invalid;