Skip to content

Zir: eliminate field_call_bind and field_call_bind_named #15691

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
May 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions lib/std/Thread/Mutex.zig
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ const FutexImpl = struct {
}
}

inline fn lockFast(self: *@This(), comptime casFn: []const u8) bool {
inline fn lockFast(self: *@This(), comptime cas_fn_name: []const u8) bool {
// On x86, use `lock bts` instead of `lock cmpxchg` as:
// - they both seem to mark the cache-line as modified regardless: https://stackoverflow.com/a/63350048
// - `lock bts` is smaller instruction-wise which makes it better for inlining
Expand All @@ -180,7 +180,8 @@ const FutexImpl = struct {

// Acquire barrier ensures grabbing the lock happens before the critical section
// and that the previous lock holder's critical section happens before we grab the lock.
return @field(self.state, casFn)(unlocked, locked, .Acquire, .Monotonic) == null;
const casFn = @field(@TypeOf(self.state), cas_fn_name);
return casFn(&self.state, unlocked, locked, .Acquire, .Monotonic) == null;
}

fn lockSlow(self: *@This()) void {
Expand Down
4 changes: 2 additions & 2 deletions lib/std/crypto/siphash.zig
Original file line number Diff line number Diff line change
Expand Up @@ -167,8 +167,8 @@ fn SipHashStateless(comptime T: type, comptime c_rounds: usize, comptime d_round
pub fn hash(msg: []const u8, key: *const [key_length]u8) T {
const aligned_len = msg.len - (msg.len % 8);
var c = Self.init(key);
@call(.always_inline, c.update, .{msg[0..aligned_len]});
return @call(.always_inline, c.final, .{msg[aligned_len..]});
@call(.always_inline, update, .{ &c, msg[0..aligned_len] });
return @call(.always_inline, final, .{ &c, msg[aligned_len..] });
}
};
}
Expand Down
10 changes: 7 additions & 3 deletions lib/std/hash/auto_hash.zig
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,13 @@ pub fn hashArray(hasher: anytype, key: anytype, comptime strat: HashStrategy) vo
/// Strategy is provided to determine if pointers should be followed or not.
pub fn hash(hasher: anytype, key: anytype, comptime strat: HashStrategy) void {
const Key = @TypeOf(key);
const Hasher = switch (@typeInfo(@TypeOf(hasher))) {
.Pointer => |ptr| ptr.child,
else => @TypeOf(hasher),
};

if (strat == .Shallow and comptime meta.trait.hasUniqueRepresentation(Key)) {
@call(.always_inline, hasher.update, .{mem.asBytes(&key)});
@call(.always_inline, Hasher.update, .{ hasher, mem.asBytes(&key) });
return;
}

Expand All @@ -89,12 +93,12 @@ pub fn hash(hasher: anytype, key: anytype, comptime strat: HashStrategy) void {
// TODO Check if the situation is better after #561 is resolved.
.Int => {
if (comptime meta.trait.hasUniqueRepresentation(Key)) {
@call(.always_inline, hasher.update, .{std.mem.asBytes(&key)});
@call(.always_inline, Hasher.update, .{ hasher, std.mem.asBytes(&key) });
} else {
// Take only the part containing the key value, the remaining
// bytes are undefined and must not be hashed!
const byte_size = comptime std.math.divCeil(comptime_int, @bitSizeOf(Key), 8) catch unreachable;
@call(.always_inline, hasher.update, .{std.mem.asBytes(&key)[0..byte_size]});
@call(.always_inline, Hasher.update, .{ hasher, std.mem.asBytes(&key)[0..byte_size] });
}
},

Expand Down
6 changes: 3 additions & 3 deletions lib/std/hash/wyhash.zig
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ const WyhashStateless = struct {

var off: usize = 0;
while (off < b.len) : (off += 32) {
@call(.always_inline, self.round, .{b[off..][0..32]});
@call(.always_inline, round, .{ self, b[off..][0..32] });
}

self.msg_len += b.len;
Expand Down Expand Up @@ -121,8 +121,8 @@ const WyhashStateless = struct {
const aligned_len = input.len - (input.len % 32);

var c = WyhashStateless.init(seed);
@call(.always_inline, c.update, .{input[0..aligned_len]});
return @call(.always_inline, c.final, .{input[aligned_len..]});
@call(.always_inline, update, .{ &c, input[0..aligned_len] });
return @call(.always_inline, final, .{ &c, input[aligned_len..] });
}
};

Expand Down
149 changes: 80 additions & 69 deletions src/AstGen.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2482,7 +2482,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
switch (zir_tags[inst]) {
// For some instructions, modify the zir data
// so we can avoid a separate ensure_result_used instruction.
.call => {
.call, .field_call => {
const extra_index = gz.astgen.instructions.items(.data)[inst].pl_node.payload_index;
const slot = &gz.astgen.extra.items[extra_index];
var flags = @bitCast(Zir.Inst.Call.Flags, slot.*);
Expand Down Expand Up @@ -2557,7 +2557,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.field_ptr,
.field_ptr_init,
.field_val,
.field_call_bind,
.field_ptr_named,
.field_val_named,
.func,
Expand Down Expand Up @@ -8516,7 +8515,7 @@ fn builtinCall(
},
.call => {
const modifier = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .modifier_type } }, params[0]);
const callee = try calleeExpr(gz, scope, params[1]);
const callee = try expr(gz, scope, .{ .rl = .none }, params[1]);
const args = try expr(gz, scope, .{ .rl = .none }, params[2]);
const result = try gz.addPlNode(.builtin_call, node, Zir.Inst.BuiltinCall{
.modifier = modifier,
Expand Down Expand Up @@ -8976,7 +8975,10 @@ fn callExpr(
} });
}

assert(callee != .none);
switch (callee) {
.direct => |obj| assert(obj != .none),
.field => |field| assert(field.obj_ptr != .none),
}
assert(node != 0);

const call_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
Expand Down Expand Up @@ -9015,89 +9017,98 @@ fn callExpr(
else => false,
};

const payload_index = try addExtra(astgen, Zir.Inst.Call{
.callee = callee,
.flags = .{
.pop_error_return_trace = !propagate_error_trace,
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @enumToInt(modifier)),
.args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len),
switch (callee) {
.direct => |callee_obj| {
const payload_index = try addExtra(astgen, Zir.Inst.Call{
.callee = callee_obj,
.flags = .{
.pop_error_return_trace = !propagate_error_trace,
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @enumToInt(modifier)),
.args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len),
},
});
if (call.ast.params.len != 0) {
try astgen.extra.appendSlice(astgen.gpa, astgen.scratch.items[scratch_top..]);
}
gz.astgen.instructions.set(call_index, .{
.tag = .call,
.data = .{ .pl_node = .{
.src_node = gz.nodeIndexToRelative(node),
.payload_index = payload_index,
} },
});
},
.field => |callee_field| {
const payload_index = try addExtra(astgen, Zir.Inst.FieldCall{
.obj_ptr = callee_field.obj_ptr,
.field_name_start = callee_field.field_name_start,
.flags = .{
.pop_error_return_trace = !propagate_error_trace,
.packed_modifier = @intCast(Zir.Inst.Call.Flags.PackedModifier, @enumToInt(modifier)),
.args_len = @intCast(Zir.Inst.Call.Flags.PackedArgsLen, call.ast.params.len),
},
});
if (call.ast.params.len != 0) {
try astgen.extra.appendSlice(astgen.gpa, astgen.scratch.items[scratch_top..]);
}
gz.astgen.instructions.set(call_index, .{
.tag = .field_call,
.data = .{ .pl_node = .{
.src_node = gz.nodeIndexToRelative(node),
.payload_index = payload_index,
} },
});
},
});
if (call.ast.params.len != 0) {
try astgen.extra.appendSlice(astgen.gpa, astgen.scratch.items[scratch_top..]);
}
gz.astgen.instructions.set(call_index, .{
.tag = .call,
.data = .{ .pl_node = .{
.src_node = gz.nodeIndexToRelative(node),
.payload_index = payload_index,
} },
});
return rvalue(gz, ri, call_inst, node); // TODO function call with result location
}

/// calleeExpr generates the function part of a call expression (f in f(x)), or the
/// callee argument to the @call() builtin. If the lhs is a field access or the
/// @field() builtin, we need to generate a special field_call_bind instruction
/// instead of the normal field_val or field_ptr. If this is an inst.func() call,
/// this instruction will capture the value of the first argument before evaluating
/// the other arguments. We need to use .ref here to guarantee we will be able to
/// promote an lvalue to an address if the first parameter requires it. This
/// unfortunately also means we need to take a reference to any types on the lhs.
/// Result of resolving the callee of a call expression: either method-call
/// syntax (`a.b()`), carried as the `field` variant, or any other callee
/// expression, carried as the `direct` variant.
const Callee = union(enum) {
field: struct {
/// A *pointer* to the object the field is fetched on, so that we can
/// promote the lvalue to an address if the first parameter requires it.
obj_ptr: Zir.Inst.Ref,
/// Offset into `string_bytes`.
field_name_start: u32,
},
/// The callee was evaluated as an ordinary expression (no method-call
/// syntax involved).
direct: Zir.Inst.Ref,
};

/// calleeExpr generates the function part of a call expression (f in f(x)), but
/// *not* the callee argument to the @call() builtin. Its purpose is to
/// distinguish between standard calls and method call syntax `a.b()`. Thus, if
/// the lhs is a field access, we return using the `field` union field;
/// otherwise, we use the `direct` union field.
fn calleeExpr(
gz: *GenZir,
scope: *Scope,
node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
) InnerError!Callee {
const astgen = gz.astgen;
const tree = astgen.tree;

const tag = tree.nodes.items(.tag)[node];
switch (tag) {
.field_access => return addFieldAccess(.field_call_bind, gz, scope, .{ .rl = .ref }, node),

.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
.builtin_call_comma,
=> {
const node_datas = tree.nodes.items(.data);
.field_access => {
const main_tokens = tree.nodes.items(.main_token);
const builtin_token = main_tokens[node];
const builtin_name = tree.tokenSlice(builtin_token);

var inline_params: [2]Ast.Node.Index = undefined;
var params: []Ast.Node.Index = switch (tag) {
.builtin_call,
.builtin_call_comma,
=> tree.extra_data[node_datas[node].lhs..node_datas[node].rhs],

.builtin_call_two,
.builtin_call_two_comma,
=> blk: {
inline_params = .{ node_datas[node].lhs, node_datas[node].rhs };
const len: usize = if (inline_params[0] == 0) @as(usize, 0) else if (inline_params[1] == 0) @as(usize, 1) else @as(usize, 2);
break :blk inline_params[0..len];
},

else => unreachable,
};
const node_datas = tree.nodes.items(.data);
const object_node = node_datas[node].lhs;
const dot_token = main_tokens[node];
const field_ident = dot_token + 1;
const str_index = try astgen.identAsString(field_ident);
// Capture the object by reference so we can promote it to an
// address in Sema if needed.
const lhs = try expr(gz, scope, .{ .rl = .ref }, object_node);

// If anything is wrong, fall back to builtinCall.
// It will emit any necessary compile errors and notes.
if (std.mem.eql(u8, builtin_name, "@field") and params.len == 2) {
const lhs = try expr(gz, scope, .{ .rl = .ref }, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = .const_slice_u8_type } }, params[1]);
return gz.addExtendedPayload(.field_call_bind_named, Zir.Inst.FieldNamedNode{
.node = gz.nodeIndexToRelative(node),
.lhs = lhs,
.field_name = field_name,
});
}
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
try emitDbgStmt(gz, cursor);

return builtinCall(gz, scope, .{ .rl = .none }, node, params);
return .{ .field = .{
.obj_ptr = lhs,
.field_name_start = str_index,
} };
},
else => return expr(gz, scope, .{ .rl = .none }, node),
else => return .{ .direct = try expr(gz, scope, .{ .rl = .none }, node) },
}
}

Expand Down
5 changes: 2 additions & 3 deletions src/Autodoc.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2141,7 +2141,7 @@ fn walkInstruction(
.expr = .{ .declRef = decl_status },
};
},
.field_val, .field_call_bind, .field_ptr, .field_type => {
.field_val, .field_ptr, .field_type => {
// TODO: field type uses Zir.Inst.FieldType, it just happens to have the
// same layout as Zir.Inst.Field :^)
const pl_node = data[inst_index].pl_node;
Expand All @@ -2163,7 +2163,6 @@ fn walkInstruction(

const lhs = @enumToInt(lhs_extra.data.lhs) - Ref.typed_value_map.len;
if (tags[lhs] != .field_val and
tags[lhs] != .field_call_bind and
tags[lhs] != .field_ptr and
tags[lhs] != .field_type) break :blk lhs_extra.data.lhs;

Expand Down Expand Up @@ -2191,7 +2190,7 @@ fn walkInstruction(
const wr = blk: {
if (@enumToInt(lhs_ref) >= Ref.typed_value_map.len) {
const lhs_inst = @enumToInt(lhs_ref) - Ref.typed_value_map.len;
if (tags[lhs_inst] == .call) {
if (tags[lhs_inst] == .call or tags[lhs_inst] == .field_call) {
break :blk DocData.WalkResult{
.expr = .{
.comptimeExpr = 0,
Expand Down
16 changes: 15 additions & 1 deletion src/Module.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2489,8 +2489,21 @@ pub const SrcLoc = struct {
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const tok_index = switch (node_tags[node]) {
.field_access => node_datas[node].rhs,
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
.call,
.call_comma,
.async_call,
.async_call_comma,
=> blk: {
const full = tree.fullCall(&buf, node).?;
break :blk tree.lastToken(full.ast.fn_expr);
},
else => tree.firstToken(node) - 2,
};
const start = tree.tokens.items(.start)[tok_index];
Expand Down Expand Up @@ -3083,7 +3096,8 @@ pub const LazySrcLoc = union(enum) {
/// The payload is offset from the containing Decl AST node.
/// The source location points to the field name of:
/// * a field access expression (`a.b`), or
/// * the operand ("b" node) of a field initialization expression (`.a = b`)
/// * the callee of a method call (`a.b()`), or
/// * the operand ("b" node) of a field initialization expression (`.a = b`).
/// The Decl is determined contextually.
node_offset_field_name: i32,
/// The source location points to the pointer of a pointer deref expression,
Expand Down
Loading