update deprecated code #7502

Merged
merged 2 commits on Dec 23, 2020
doc/docgen.zig: 12 changes (6 additions, 6 deletions)

@@ -42,15 +42,15 @@ pub fn main() !void {

const input_file_bytes = try in_file.inStream().readAllAlloc(allocator, max_doc_file_size);

- var buffered_out_stream = io.bufferedOutStream(out_file.outStream());
+ var buffered_out_stream = io.bufferedOutStream(out_file.writer());

var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
var toc = try genToc(allocator, &tokenizer);

try fs.cwd().makePath(tmp_dir_name);
defer fs.cwd().deleteTree(tmp_dir_name) catch {};

- try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.outStream(), zig_exe);
+ try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.writer(), zig_exe);
try buffered_out_stream.flush();
}

@@ -325,7 +325,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var toc_buf = std.ArrayList(u8).init(allocator);
defer toc_buf.deinit();

- var toc = toc_buf.outStream();
+ var toc = toc_buf.writer();

var nodes = std.ArrayList(Node).init(allocator);
defer nodes.deinit();
@@ -615,7 +615,7 @@ fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

- const out = buf.outStream();
+ const out = buf.writer();
for (input) |c| {
switch (c) {
'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
@@ -634,7 +634,7 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

- const out = buf.outStream();
+ const out = buf.writer();
try writeEscaped(out, input);
return buf.toOwnedSlice();
}
@@ -680,7 +680,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

- var out = buf.outStream();
+ var out = buf.writer();
var number_start_index: usize = undefined;
var first_number: usize = undefined;
var second_number: usize = undefined;
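
The docgen.zig changes all follow one pattern: the deprecated `outStream()` accessor becomes `writer()`, on both the output file and the buffered wrapper. A minimal sketch of the updated pattern, assuming the `std.io` API from the 0.7.x development cycle this PR targets (the output text is a placeholder):

```zig
const std = @import("std");
const io = std.io;

pub fn main() !void {
    const out_file = io.getStdOut();

    // Previously: io.bufferedOutStream(out_file.outStream())
    var buffered_out_stream = io.bufferedOutStream(out_file.writer());

    // Previously: buffered_out_stream.outStream()
    const out = buffered_out_stream.writer();

    try out.writeAll("<html>generated docs would go here</html>\n");
    try buffered_out_stream.flush();
}
```
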
src/main.zig: 2 changes (1 addition, 1 deletion)

@@ -199,7 +199,7 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
} else if (mem.eql(u8, cmd, "version")) {
try std.io.getStdOut().writeAll(build_options.version ++ "\n");
} else if (mem.eql(u8, cmd, "env")) {
try @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().outStream());
try @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
} else if (mem.eql(u8, cmd, "zen")) {
try io.getStdOut().writeAll(info_zen);
} else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) {
src/print_env.zig: 2 changes (1 addition, 1 deletion)

@@ -4,7 +4,7 @@ const introspect = @import("introspect.zig");
const Allocator = std.mem.Allocator;
const fatal = @import("main.zig").fatal;

- pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: anytype) !void {
+ pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: std.fs.File.Writer) !void {
const self_exe_path = try std.fs.selfExePathAlloc(gpa);
defer gpa.free(self_exe_path);

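
At the call site in src/main.zig, `io.getStdOut().writer()` produces a `std.fs.File.Writer`, so it satisfies either form of the `cmdEnv` parameter. A small sketch under that assumption; `printEnvSketch` and its output are hypothetical stand-ins, not the real `cmdEnv`:

```zig
const std = @import("std");

// Hypothetical stand-in for print_env.zig's cmdEnv: it only shows that a
// std.fs.File.Writer parameter accepts the value returned by getStdOut().writer().
fn printEnvSketch(stdout: std.fs.File.Writer) !void {
    try stdout.writeAll("zig_exe=/path/to/zig\n");
}

pub fn main() !void {
    try printEnvSketch(std.io.getStdOut().writer());
}
```
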
tools/merge_anal_dumps.zig: 30 changes (15 additions, 15 deletions)

@@ -17,13 +17,13 @@ pub fn main() anyerror!void {
var dump = Dump.init(allocator);
for (args[1..]) |arg| {
parser = json.Parser.init(allocator, false);
- const json_text = try std.io.readFileAlloc(allocator, arg);
+ const json_text = try std.fs.cwd().readFileAlloc(allocator, arg, std.math.maxInt(usize));
const tree = try parser.parse(json_text);
try dump.mergeJson(tree.root);
}

const stdout = try std.io.getStdOut();
- try dump.render(stdout.outStream());
+ try dump.render(stdout.writer());
}

/// AST source node
@@ -33,12 +33,12 @@ const Node = struct {
col: usize,
fields: []usize,

- fn hash(n: Node) u32 {
+ fn hash(n: Node) u64 {
var hasher = std.hash.Wyhash.init(0);
std.hash.autoHash(&hasher, n.file);
std.hash.autoHash(&hasher, n.line);
std.hash.autoHash(&hasher, n.col);
- return @truncate(u32, hasher.final());
+ return hasher.final();
}

fn eql(a: Node, b: Node) bool {
@@ -52,10 +52,10 @@ const Error = struct {
src: usize,
name: []const u8,

- fn hash(n: Error) u32 {
+ fn hash(n: Error) u64 {
var hasher = std.hash.Wyhash.init(0);
std.hash.autoHash(&hasher, n.src);
- return @truncate(u32, hasher.final());
+ return hasher.final();
}

fn eql(a: Error, b: Error) bool {
@@ -103,7 +103,6 @@ const Type = union(builtin.TypeId) {
Union, // TODO
Fn, // TODO
BoundFn, // TODO
- ArgTuple, // TODO
Opaque, // TODO
Frame, // TODO

@@ -127,10 +126,10 @@
len: usize,
};

- fn hash(t: Type) u32 {
+ fn hash(t: Type) u64 {
var hasher = std.hash.Wyhash.init(0);
- std.hash.autoHash(&hasher, builtin.TypeId(t));
- return @truncate(u32, hasher.final());
+ std.hash.autoHash(&hasher, t);
+ return hasher.final();
}

fn eql(a: Type, b: Type) bool {
@@ -144,22 +143,23 @@ const Dump = struct {
root_name: ?[]const u8 = null,
targets: std.ArrayList([]const u8),

- const FileMap = std.StringHashMap(usize);
file_list: std.ArrayList([]const u8),
file_map: FileMap,

- const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql);
node_list: std.ArrayList(Node),
node_map: NodeMap,

- const ErrorMap = std.HashMap(Error, usize, Error.hash, Error.eql);
error_list: std.ArrayList(Error),
error_map: ErrorMap,

- const TypeMap = std.HashMap(Type, usize, Type.hash, Type.eql);
type_list: std.ArrayList(Type),
type_map: TypeMap,

+ const FileMap = std.StringHashMap(usize);
+ const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql, 80);
+ const ErrorMap = std.HashMap(Error, usize, Error.hash, Error.eql, 80);
+ const TypeMap = std.HashMap(Type, usize, Type.hash, Type.eql, 80);

fn init(allocator: *mem.Allocator) Dump {
return Dump{
.targets = std.ArrayList([]const u8).init(allocator),
@@ -310,7 +310,7 @@ const Dump = struct {
try other_types_to_mine.putNoClobber(other_type_index, gop.kv.value);
}

- fn render(self: *Dump, stream: var) !void {
+ fn render(self: *Dump, stream: anytype) !void {
var jw = json.WriteStream(@TypeOf(stream).Child, 10).init(stream);
try jw.beginObject();

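
The hash-related edits in this file match the updated `std.HashMap` interface, where hash functions return `u64` (so the `@truncate` to `u32` goes away) and the map type takes an explicit maximum load percentage; the file read moves from `std.io.readFileAlloc` to `fs.Dir.readFileAlloc` with an explicit size cap. A self-contained sketch assuming that era's std API; the file name `dump.json` and the stored values are placeholders:

```zig
const std = @import("std");

const Node = struct {
    file: usize,
    line: usize,
    col: usize,

    // Hash functions now return u64; the old code truncated to u32.
    fn hash(n: Node) u64 {
        var hasher = std.hash.Wyhash.init(0);
        std.hash.autoHash(&hasher, n.file);
        std.hash.autoHash(&hasher, n.line);
        std.hash.autoHash(&hasher, n.col);
        return hasher.final();
    }

    fn eql(a: Node, b: Node) bool {
        return a.file == b.file and a.line == b.line and a.col == b.col;
    }
};

// The extra argument (80) is the maximum load percentage the map tolerates before growing.
const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql, 80);

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    // readFileAlloc now lives on fs.Dir and takes an explicit size limit.
    const json_text = try std.fs.cwd().readFileAlloc(allocator, "dump.json", std.math.maxInt(usize));
    defer allocator.free(json_text);
    std.debug.print("read {} bytes\n", .{json_text.len});

    var map = NodeMap.init(allocator);
    defer map.deinit();
    try map.put(.{ .file = 1, .line = 2, .col = 3 }, 0);
}
```
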
tools/update_glibc.zig: 12 changes (6 additions, 6 deletions)

@@ -239,8 +239,8 @@ pub fn main() !void {
const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" });
const vers_txt_file = try fs.cwd().createFile(vers_txt_path, .{});
defer vers_txt_file.close();
- var buffered = std.io.bufferedOutStream(vers_txt_file.outStream());
- const vers_txt = buffered.outStream();
+ var buffered = std.io.bufferedOutStream(vers_txt_file.writer());
+ const vers_txt = buffered.writer();
for (global_ver_list) |name, i| {
_ = global_ver_set.put(name, i) catch unreachable;
try vers_txt.print("{}\n", .{name});
@@ -251,8 +251,8 @@ pub fn main() !void {
const fns_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "fns.txt" });
const fns_txt_file = try fs.cwd().createFile(fns_txt_path, .{});
defer fns_txt_file.close();
- var buffered = std.io.bufferedOutStream(fns_txt_file.outStream());
- const fns_txt = buffered.outStream();
+ var buffered = std.io.bufferedOutStream(fns_txt_file.writer());
+ const fns_txt = buffered.writer();
for (global_fn_list) |name, i| {
const entry = global_fn_set.getEntry(name).?;
entry.value.index = i;
@@ -282,8 +282,8 @@ pub fn main() !void {
const abilist_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "abi.txt" });
const abilist_txt_file = try fs.cwd().createFile(abilist_txt_path, .{});
defer abilist_txt_file.close();
- var buffered = std.io.bufferedOutStream(abilist_txt_file.outStream());
- const abilist_txt = buffered.outStream();
+ var buffered = std.io.bufferedOutStream(abilist_txt_file.writer());
+ const abilist_txt = buffered.writer();

// first iterate over the abi lists
for (abi_lists) |*abi_list, abi_index| {
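
The three blocks in update_glibc.zig share one shape: create the output file, wrap it in a buffered stream, and print through `writer()` instead of the deprecated `outStream()`. A minimal sketch under the same API assumptions as above; the file name and the printed version string are placeholders:

```zig
const std = @import("std");
const fs = std.fs;

pub fn main() !void {
    const vers_txt_file = try fs.cwd().createFile("vers.txt", .{});
    defer vers_txt_file.close();

    // Previously vers_txt_file.outStream() and buffered.outStream(); both are now writer().
    var buffered = std.io.bufferedOutStream(vers_txt_file.writer());
    const vers_txt = buffered.writer();

    const name: []const u8 = "GLIBC_2.17";
    try vers_txt.print("{}\n", .{name});
    try buffered.flush();
}
```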