From fd0398b32a6448568cd871a05d7734a8e607442a Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Fri, 11 Nov 2022 23:39:33 +0100 Subject: [PATCH 01/51] Rename `@maximum` and `@minimum` to `@max` and `@min` --- misctools/http_bench.zig | 2 +- src/__global.zig | 4 +- src/analytics/analytics_schema.zig | 2 +- src/api/demo/schema.zig | 2 +- src/api/schema.zig | 2 +- src/bun.js/api/bun.zig | 8 +-- src/bun.js/api/bun/socket.zig | 14 ++--- src/bun.js/api/bun/subprocess.zig | 4 +- src/bun.js/api/server.zig | 14 ++--- src/bun.js/base.zig | 4 +- src/bun.js/bindings/bindings.zig | 10 ++-- src/bun.js/bindings/exports.zig | 8 +-- src/bun.js/javascript.zig | 4 +- src/bun.js/module_loader.zig | 2 +- src/bun.js/node/buffer.zig | 4 +- src/bun.js/node/node_fs.zig | 24 ++++---- src/bun.js/node/syscall.zig | 10 ++-- src/bun.js/node/types.zig | 4 +- src/bun.js/test/jest.zig | 2 +- src/bun.js/webcore/encoding.zig | 6 +- src/bun.js/webcore/response.zig | 22 ++++---- src/bun.js/webcore/streams.zig | 28 ++++----- src/cli/colon_list_type.zig | 2 +- src/cli/upgrade_command.zig | 2 +- src/deps/picohttp.zig | 2 +- src/deps/zig-clap/clap/comptime.zig | 2 +- src/env_loader.zig | 10 ++-- src/http.zig | 8 +-- src/http/url_path.zig | 4 +- src/http/websocket_http_client.zig | 16 +++--- src/http_client_async.zig | 10 ++-- src/install/dependency.zig | 26 ++++----- src/install/extract_tarball.zig | 2 +- src/install/install.zig | 16 +++--- src/install/integrity.zig | 4 +- src/io/io_darwin.zig | 4 +- src/io/io_linux.zig | 4 +- src/js_lexer.zig | 2 +- src/js_parser.zig | 4 +- src/js_printer.zig | 2 +- src/json_parser.zig | 4 +- src/linker.zig | 4 +- src/logger.zig | 6 +- src/mdx/mdx_parser.zig | 88 ++++++++++++++--------------- src/napi/napi.zig | 8 +-- src/report.zig | 12 ++-- src/resolver/package_json.zig | 2 +- src/resolver/resolve_path.zig | 4 +- src/router.zig | 8 +-- src/sourcemap/sourcemap.zig | 2 +- src/sourcemap/vlq_bench.zig | 2 +- src/string_immutable.zig | 28 ++++----- src/string_joiner.zig | 2 +- src/watcher.zig | 2 +- 54 files changed, 236 insertions(+), 236 deletions(-) diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index dbec009f9c138f..fe2d29d6a43b2e 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -253,7 +253,7 @@ pub fn main() anyerror!void { } max_duration = @maximum(max_duration, http.elapsed); - min_duration = @minimum(min_duration, http.elapsed); + min_duration = @min(min_duration, http.elapsed); switch (resp.status_code) { 200, 202, 302 => { diff --git a/src/__global.zig b/src/__global.zig index ceabb12d9cd340..70d9e8b6860cd3 100644 --- a/src/__global.zig +++ b/src/__global.zig @@ -16,9 +16,9 @@ else pub const package_json_version_with_sha = if (Environment.git_sha.len == 0) package_json_version else if (Environment.isDebug) - std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({s})", .{ build_id, Environment.git_sha[0..@minimum(Environment.git_sha.len, 8)] }) + std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }) else - std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({s})", .{ build_id, Environment.git_sha[0..@minimum(Environment.git_sha.len, 8)] }); + std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }); pub const os_name = if (Environment.isWindows) "win32" diff --git a/src/analytics/analytics_schema.zig b/src/analytics/analytics_schema.zig index b9d07424961025..11dd7d919ffc50 100644 --- 
a/src/analytics/analytics_schema.zig +++ b/src/analytics/analytics_schema.zig @@ -17,7 +17,7 @@ pub const Reader = struct { } pub fn read(this: *Self, count: usize) ![]u8 { - const read_count = @minimum(count, this.remain.len); + const read_count = @min(count, this.remain.len); if (read_count < count) { return error.EOF; } diff --git a/src/api/demo/schema.zig b/src/api/demo/schema.zig index 61d6b82d1da994..e4871b902d4fe0 100644 --- a/src/api/demo/schema.zig +++ b/src/api/demo/schema.zig @@ -17,7 +17,7 @@ pub const Reader = struct { } pub fn read(this: *Self, count: usize) ![]u8 { - const read_count = @minimum(count, this.remain.len); + const read_count = @min(count, this.remain.len); if (read_count < count) { return error.EOF; } diff --git a/src/api/schema.zig b/src/api/schema.zig index e16c4ed797e528..4c436af8f45031 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -17,7 +17,7 @@ pub const Reader = struct { } pub fn read(this: *Self, count: usize) ![]u8 { - const read_count = @minimum(count, this.remain.len); + const read_count = @min(count, this.remain.len); if (read_count < count) { return error.EOF; } diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index 727712897d0c58..2fa6839d6c70e1 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -679,7 +679,7 @@ pub fn getRouteFiles( const router = &VirtualMachine.vm.bundler.router.?; const list = router.getPublicPaths() catch unreachable; - for (routes_list_strings[0..@minimum(list.len, routes_list_strings.len)]) |_, i| { + for (routes_list_strings[0..@min(list.len, routes_list_strings.len)]) |_, i| { routes_list_strings[i] = ZigString.init(list[i]); } @@ -700,7 +700,7 @@ pub fn getRouteNames( const router = &VirtualMachine.vm.bundler.router.?; const list = router.getNames() catch unreachable; - for (routes_list_strings[0..@minimum(list.len, routes_list_strings.len)]) |_, i| { + for (routes_list_strings[0..@min(list.len, routes_list_strings.len)]) |_, i| { routes_list_strings[i] = ZigString.init(list[i]); } @@ -1705,7 +1705,7 @@ pub fn allocUnsafe( const length = @intCast( usize, - @minimum( + @min( @maximum(1, (args.nextEat() orelse JSC.JSValue.jsNumber(@as(i32, 1))).toInt32()), std.math.maxInt(i32), ), @@ -1733,7 +1733,7 @@ pub fn mmapFile( var args = JSC.Node.ArgumentsSlice.from(ctx.bunVM(), arguments); var buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const path = getFilePath(ctx, arguments[0..@minimum(1, arguments.len)], &buf, exception) orelse return null; + const path = getFilePath(ctx, arguments[0..@min(1, arguments.len)], &buf, exception) orelse return null; args.eat(); buf[path.len] = 0; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 77b4a266fde314..7b629da6e80fe7 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -1165,7 +1165,7 @@ fn NewSocket(comptime ssl: bool) type { var buf: [512]u8 = undefined; var length: i32 = 512; this.socket.remoteAddress(&buf, &length); - const address = buf[0..@intCast(usize, @minimum(length, 0))]; + const address = buf[0..@intCast(usize, @min(length, 0))]; if (address.len == 0) { return JSValue.jsUndefined(); @@ -1198,7 +1198,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const offset = @minimum(args.ptr[1].toUInt64NoTruncate(), slice.len); + const offset = @min(args.ptr[1].toUInt64NoTruncate(), slice.len); slice = slice[offset..]; if (args.len > 2) { @@ -1207,7 +1207,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const length = @minimum(args.ptr[2].toUInt64NoTruncate(), 
slice.len); + const length = @min(args.ptr[2].toUInt64NoTruncate(), slice.len); slice = slice[0..length]; } } @@ -1253,7 +1253,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const offset = @minimum(args.ptr[1].toUInt64NoTruncate(), slice.len); + const offset = @min(args.ptr[1].toUInt64NoTruncate(), slice.len); slice = slice[offset..]; if (args.len > 2) { @@ -1262,7 +1262,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const length = @minimum(args.ptr[2].toUInt64NoTruncate(), slice.len); + const length = @min(args.ptr[2].toUInt64NoTruncate(), slice.len); slice = slice[0..length]; } } @@ -1288,7 +1288,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const offset = @minimum(args.ptr[1].toUInt64NoTruncate(), slice.len); + const offset = @min(args.ptr[1].toUInt64NoTruncate(), slice.len); slice = slice[offset..]; if (args.len > 2) { @@ -1297,7 +1297,7 @@ fn NewSocket(comptime ssl: bool) type { return .zero; } - const length = @minimum(args.ptr[2].toUInt64NoTruncate(), slice.len); + const length = @min(args.ptr[2].toUInt64NoTruncate(), slice.len); slice = slice[0..length]; } } diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index c82b4744f3253a..4f6a910347486a 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -431,7 +431,7 @@ pub const Subprocess = struct { }, ); - this.remain = this.remain[@minimum(bytes_written, this.remain.len)..]; + this.remain = this.remain[@min(bytes_written, this.remain.len)..]; to_write = to_write[bytes_written..]; // we are done or it accepts no more input @@ -966,7 +966,7 @@ pub const Subprocess = struct { if (!stdio_val.isEmptyOrUndefinedOrNull()) { if (stdio_val.jsType().isArray()) { var stdio_iter = stdio_val.arrayIterator(globalThis); - stdio_iter.len = @minimum(stdio_iter.len, 3); + stdio_iter.len = @min(stdio_iter.len, 3); var i: usize = 0; while (stdio_iter.next()) |value| : (i += 1) { if (!extractStdio(globalThis, i, value, &stdio)) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index f4d173c95d1e21..4d1d1369386782 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -304,7 +304,7 @@ pub const ServerConfig = struct { if (arg.getTruthy(global, "port")) |port_| { args.port = @intCast( u16, - @minimum( + @min( @maximum(0, port_.coerce(i32, global)), std.math.maxInt(u16), ), @@ -1035,8 +1035,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp return false; } - const adjusted_count_temporary = @minimum(@as(u64, this.sendfile.remain), @as(u63, std.math.maxInt(u63))); - // TODO we should not need this int cast; improve the return type of `@minimum` + const adjusted_count_temporary = @min(@as(u64, this.sendfile.remain), @as(u63, std.math.maxInt(u63))); + // TODO we should not need this int cast; improve the return type of `@min` const adjusted_count = @intCast(u63, adjusted_count_temporary); if (Environment.isLinux) { @@ -1114,7 +1114,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp pub fn sendWritableBytesForBlob(this: *RequestContext, bytes_: []const u8, write_offset: c_ulong, resp: *App.Response) bool { std.debug.assert(this.resp == resp); - var bytes = bytes_[@minimum(bytes_.len, @truncate(usize, write_offset))..]; + var bytes = bytes_[@min(bytes_.len, @truncate(usize, write_offset))..]; if (resp.tryEnd(bytes, bytes_.len, this.shouldCloseConnection())) { this.finalize(); return true; @@ -1127,7 +1127,7 @@ fn NewRequestContext(comptime 
ssl_enabled: bool, comptime debug_mode: bool, comp pub fn sendWritableBytesForCompleteResponseBuffer(this: *RequestContext, bytes_: []const u8, write_offset: c_ulong, resp: *App.Response) bool { std.debug.assert(this.resp == resp); - var bytes = bytes_[@minimum(bytes_.len, @truncate(usize, write_offset))..]; + var bytes = bytes_[@min(bytes_.len, @truncate(usize, write_offset))..]; if (resp.tryEnd(bytes, bytes_.len, this.shouldCloseConnection())) { this.response_buf_owned.items.len = 0; this.finalize(); @@ -2057,7 +2057,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.resp.writeHeader( "content-disposition", - std.fmt.bufPrint(&filename_buf, "filename=\"{s}\"", .{basename[0..@minimum(basename.len, 1024 - 32)]}) catch "", + std.fmt.bufPrint(&filename_buf, "filename=\"{s}\"", .{basename[0..@min(basename.len, 1024 - 32)]}) catch "", ); } } @@ -2175,7 +2175,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } if (this.request_body_buf.capacity == 0) { - this.request_body_buf.ensureTotalCapacityPrecise(this.allocator, @minimum(this.request_body_content_len, max_request_body_preallocate_length)) catch @panic("Out of memory while allocating request body buffer"); + this.request_body_buf.ensureTotalCapacityPrecise(this.allocator, @min(this.request_body_content_len, max_request_body_preallocate_length)) catch @panic("Out of memory while allocating request body buffer"); } this.request_body_buf.appendSlice(this.allocator, chunk) catch @panic("Out of memory while allocating request body"); diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index 225536591b76c2..cdaa788bcf6ccd 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -207,7 +207,7 @@ pub const To = struct { if (value.len <= prefill) { var array: [prefill]JSC.C.JSValueRef = undefined; var i: u8 = 0; - const len = @minimum(@intCast(u8, value.len), prefill); + const len = @min(@intCast(u8, value.len), prefill); while (i < len and exception.* == null) : (i += 1) { array[i] = if (comptime Child == JSC.C.JSValueRef) value[i] @@ -3257,7 +3257,7 @@ pub fn DOMCall( \\ static const JSC::DOMJIT::Signature {[signatureName]s}( \\ {[fastPathName]s}Wrapper, \\ thisObject->classInfo(), - \\ + \\ ; try writer.print(fmt, .{ diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index c374ff0ec182d6..3c60997401fd2c 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -105,10 +105,10 @@ pub const ZigString = extern struct { pub fn substring(this: ZigString, offset: usize) ZigString { if (this.is16Bit()) { - return ZigString.from16Slice(this.utf16SliceAligned()[@minimum(this.len, offset)..]); + return ZigString.from16Slice(this.utf16SliceAligned()[@min(this.len, offset)..]); } - var out = ZigString.init(this.slice()[@minimum(this.len, offset)..]); + var out = ZigString.init(this.slice()[@min(this.len, offset)..]); if (this.isUTF8()) { out.markUTF8(); } @@ -178,7 +178,7 @@ pub const ZigString = extern struct { } pub fn trunc(this: ZigString, len: usize) ZigString { - return .{ .ptr = this.ptr, .len = @minimum(len, this.len) }; + return .{ .ptr = this.ptr, .len = @min(len, this.len) }; } pub fn eqlComptime(this: ZigString, comptime other: []const u8) bool { @@ -438,7 +438,7 @@ pub const ZigString = extern struct { } pub fn slice(this: *const ZigString) []const u8 { - return untagged(this.ptr)[0..@minimum(this.len, std.math.maxInt(u32))]; + return untagged(this.ptr)[0..@min(this.len, std.math.maxInt(u32))]; } pub fn 
dupe(this: ZigString, allocator: std.mem.Allocator) ![]const u8 { @@ -3941,7 +3941,7 @@ pub const CallFrame = opaque { var buf: [max]JSC.JSValue = std.mem.zeroes([max]JSC.JSValue); const len = self.argumentsCount(); var ptr = self.argumentsPtr(); - switch (@minimum(len, max)) { + switch (@min(len, max)) { 0 => { return .{ .ptr = buf, .len = 0 }; }, diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index 2e2c5e5567fb38..02eadc1d94f6ac 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -1473,7 +1473,7 @@ pub const ZigConsoleClient = struct { else writer.writeLatin1(end); any_non_ascii = false; - slice = slice[@minimum(slice.len, i + 1)..]; + slice = slice[@min(slice.len, i + 1)..]; i = 0; len = @truncate(u32, slice.len); const next_value = this.remaining_values[0]; @@ -1549,11 +1549,11 @@ pub const ZigConsoleClient = struct { comptime Writer: type, writer: Writer, ) !void { - const indent = @minimum(this.indent, 8); + const indent = @min(this.indent, 8); var buf = [_]u8{' '} ** 32; var total_remain: usize = indent; while (total_remain > 0) { - const written = @minimum(16, total_remain); + const written = @min(16, total_remain); try writer.writeAll(buf[0 .. written * 2]); total_remain -|= written; } @@ -2395,7 +2395,7 @@ pub const ZigConsoleClient = struct { writer.print(comptime Output.prettyFmt(fmt_, enable_ansi_colors), .{slice[0]}); var leftover = slice[1..]; const max = 512; - leftover = leftover[0..@minimum(leftover.len, max)]; + leftover = leftover[0..@min(leftover.len, max)]; for (leftover) |el| { printComma(undefined, @TypeOf(&writer.ctx), &writer.ctx, enable_ansi_colors) catch unreachable; writer.writeAll(" "); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 82d34a986521cc..adcc33ab84333e 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -1168,7 +1168,7 @@ pub const VirtualMachine = struct { else => { var errors_stack: [256]*anyopaque = undefined; - var errors = errors_stack[0..@minimum(log.msgs.items.len, errors_stack.len)]; + var errors = errors_stack[0..@min(log.msgs.items.len, errors_stack.len)]; for (log.msgs.items) |msg, i| { errors[i] = switch (msg.metadata) { @@ -1662,7 +1662,7 @@ pub const VirtualMachine = struct { std.mem.set(ZigString, source_lines, ZigString.Empty); std.mem.set(i32, source_line_numbers, 0); - var lines_ = lines[0..@minimum(lines.len, source_lines.len)]; + var lines_ = lines[0..@min(lines.len, source_lines.len)]; for (lines_) |line, j| { source_lines[(lines_.len - 1) - j] = ZigString.init(line); source_line_numbers[j] = top.position.line - @intCast(i32, j) + 1; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 59a4288fb5cb9e..6d834f489e039e 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -1433,7 +1433,7 @@ pub const ModuleLoader = struct { const after_namespace = if (namespace.len == 0) specifier else - specifier[@minimum(namespace.len + 1, specifier.len)..]; + specifier[@min(namespace.len + 1, specifier.len)..]; return globalObject.runOnLoadPlugins(ZigString.init(namespace), ZigString.init(after_namespace), .bun) orelse return JSValue.zero; } diff --git a/src/bun.js/node/buffer.zig b/src/bun.js/node/buffer.zig index 541079a0b94ffd..dd3b97f96a5145 100644 --- a/src/bun.js/node/buffer.zig +++ b/src/bun.js/node/buffer.zig @@ -77,7 +77,7 @@ pub const BufferVectorized = struct { const minimum_contents = contents; while (buf.len >= contents.len) { - const min_len = @minimum(contents.len, 
buf.len); + const min_len = @min(contents.len, buf.len); std.mem.copy(u8, buf[0..min_len], contents[0..min_len]); if (buf.len <= contents.len) { break; @@ -87,7 +87,7 @@ pub const BufferVectorized = struct { } while (buf.len > 0) { - const to_fill = @minimum(minimum_contents.len, buf.len); + const to_fill = @min(minimum_contents.len, buf.len); std.mem.copy(u8, buf[0..to_fill], minimum_contents[0..to_fill]); buf = buf[to_fill..]; } diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 23596d7b50671e..0abef19c0e516f 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -2266,7 +2266,7 @@ const Return = struct { ctx.ptr(), &fields.bytesRead, &fields.buffer, - JSC.JSValue.jsNumberFromUint64(@intCast(u52, @minimum(std.math.maxInt(u52), this.bytes_read))), + JSC.JSValue.jsNumberFromUint64(@intCast(u52, @min(std.math.maxInt(u52), this.bytes_read))), this.buffer_val, ).asObjectRef(); } @@ -2290,7 +2290,7 @@ const Return = struct { ctx.ptr(), &fields.bytesWritten, &fields.buffer, - JSC.JSValue.jsNumberFromUint64(@intCast(u52, @minimum(std.math.maxInt(u52), this.bytes_written))), + JSC.JSValue.jsNumberFromUint64(@intCast(u52, @min(std.math.maxInt(u52), this.bytes_written))), if (this.buffer == .buffer) this.buffer_val else @@ -2511,7 +2511,7 @@ pub const NodeFS = struct { var buf: [16384]u8 = undefined; var remain = @intCast(u64, @maximum(stat_.size, 0)); toplevel: while (remain > 0) { - const amt = switch (Syscall.read(src_fd, buf[0..@minimum(buf.len, remain)])) { + const amt = switch (Syscall.read(src_fd, buf[0..@min(buf.len, remain)])) { .result => |result| result, .err => |err| return Maybe(Return.CopyFile){ .err = err.withPath(src) }, }; @@ -3052,7 +3052,7 @@ pub const NodeFS = struct { pub fn mkdtemp(this: *NodeFS, args: Arguments.MkdirTemp, comptime flavor: Flavor) Maybe(Return.Mkdtemp) { var prefix_buf = &this.sync_error_buf; - const len = @minimum(args.prefix.len, prefix_buf.len - 7); + const len = @min(args.prefix.len, prefix_buf.len - 7); if (len > 0) { @memcpy(prefix_buf, args.prefix.ptr, len); } @@ -3108,8 +3108,8 @@ pub const NodeFS = struct { // The sync version does no allocation except when returning the path .sync => { var buf = args.buffer.slice(); - buf = buf[@minimum(args.offset, buf.len)..]; - buf = buf[0..@minimum(buf.len, args.length)]; + buf = buf[@min(args.offset, buf.len)..]; + buf = buf[0..@min(buf.len, args.length)]; return switch (Syscall.read(args.fd, buf)) { .err => |err| .{ @@ -3134,8 +3134,8 @@ pub const NodeFS = struct { switch (comptime flavor) { .sync => { var buf = args.buffer.slice(); - buf = buf[@minimum(args.offset, buf.len)..]; - buf = buf[0..@minimum(buf.len, args.length)]; + buf = buf[@min(args.offset, buf.len)..]; + buf = buf[0..@min(buf.len, args.length)]; return switch (Syscall.pread(args.fd, buf, args.position.?)) { .err => |err| .{ @@ -3178,8 +3178,8 @@ pub const NodeFS = struct { switch (comptime flavor) { .sync => { var buf = args.buffer.slice(); - buf = buf[@minimum(args.offset, buf.len)..]; - buf = buf[0..@minimum(buf.len, args.length)]; + buf = buf[@min(args.offset, buf.len)..]; + buf = buf[0..@min(buf.len, args.length)]; return switch (Syscall.write(args.fd, buf)) { .err => |err| .{ @@ -3208,8 +3208,8 @@ pub const NodeFS = struct { switch (comptime flavor) { .sync => { var buf = args.buffer.slice(); - buf = buf[@minimum(args.offset, buf.len)..]; - buf = buf[0..@minimum(args.length, buf.len)]; + buf = buf[@min(args.offset, buf.len)..]; + buf = buf[0..@min(args.length, buf.len)]; return switch 
(Syscall.pwrite(args.fd, buf, position)) { .err => |err| .{ diff --git a/src/bun.js/node/syscall.zig b/src/bun.js/node/syscall.zig index c2d13ea59a8e93..705b263b64a7b0 100644 --- a/src/bun.js/node/syscall.zig +++ b/src/bun.js/node/syscall.zig @@ -224,7 +224,7 @@ const max_count = switch (builtin.os.tag) { }; pub fn write(fd: os.fd_t, bytes: []const u8) Maybe(usize) { - const adjusted_len = @minimum(max_count, bytes.len); + const adjusted_len = @min(max_count, bytes.len); while (true) { const rc = sys.write(fd, bytes.ptr, adjusted_len); @@ -247,7 +247,7 @@ else const fcntl_symbol = system.fcntl; pub fn pread(fd: os.fd_t, buf: []u8, offset: i64) Maybe(usize) { - const adjusted_len = @minimum(buf.len, max_count); + const adjusted_len = @min(buf.len, max_count); const ioffset = @bitCast(i64, offset); // the OS treats this as unsigned while (true) { const rc = pread_sym(fd, buf.ptr, adjusted_len, ioffset); @@ -266,7 +266,7 @@ else sys.pwrite; pub fn pwrite(fd: os.fd_t, bytes: []const u8, offset: i64) Maybe(usize) { - const adjusted_len = @minimum(bytes.len, max_count); + const adjusted_len = @min(bytes.len, max_count); const ioffset = @bitCast(i64, offset); // the OS treats this as unsigned while (true) { @@ -283,7 +283,7 @@ pub fn pwrite(fd: os.fd_t, bytes: []const u8, offset: i64) Maybe(usize) { } pub fn read(fd: os.fd_t, buf: []u8) Maybe(usize) { - const adjusted_len = @minimum(buf.len, max_count); + const adjusted_len = @min(buf.len, max_count); if (comptime Environment.isMac) { const rc = system.@"read$NOCANCEL"(fd, buf.ptr, adjusted_len); if (Maybe(usize).errnoSys(rc, .read)) |err| { @@ -526,7 +526,7 @@ pub fn mmapFile(path: [:0]const u8, flags: u32, wanted_size: ?usize, offset: usi }, }), offset) catch 0; - if (wanted_size) |size_| size = @minimum(size, size_); + if (wanted_size) |size_| size = @min(size, size_); const map = switch (mmap(null, size, os.PROT.READ | os.PROT.WRITE, flags, fd, offset)) { .result => |map| map, diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index df676a5516c2bb..075a80d3d18529 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -920,7 +920,7 @@ pub const FileSystemFlags = enum(Mode) { val.toZigString(&zig_str, ctx.ptr()); var buf: [4]u8 = .{ 0, 0, 0, 0 }; - @memcpy(&buf, zig_str.ptr, @minimum(buf.len, zig_str.len)); + @memcpy(&buf, zig_str.ptr, @min(buf.len, zig_str.len)); const Matcher = strings.ExactSizeMatcher(4); // https://github.com/nodejs/node/blob/8c3637cd35cca352794e2c128f3bc5e6b6c41380/lib/internal/fs/utils.js#L565 @@ -1631,7 +1631,7 @@ pub const Path = struct { if (u16_slice.len > 3) buf[3] = u16_slice[3]; - return std.fs.path.isAbsoluteWindowsWTF16(buf[0..@minimum(u16_slice.len, buf.len)]); + return std.fs.path.isAbsoluteWindowsWTF16(buf[0..@min(u16_slice.len, buf.len)]); } return std.fs.path.isAbsoluteWindows(zig_str.slice()); diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index e33e0fbf2659bc..5dce0eac3cb818 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -495,7 +495,7 @@ pub const TestScope = struct { exception: js.ExceptionRef, is_only: bool, ) js.JSObjectRef { - var args = arguments[0..@minimum(arguments.len, 2)]; + var args = arguments[0..@min(arguments.len, 2)]; var label: string = ""; if (args.len == 0) { return this; diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index 99b304bdab8f6a..6747b11d30d7fb 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -929,13 +929,13 @@ pub const Encoder = 
struct { switch (comptime encoding) { JSC.Node.Encoding.buffer => { - const written = @minimum(len, to_len); + const written = @min(len, to_len); @memcpy(to, input, written); return @intCast(i64, written); }, .latin1, .ascii => { - const written = @minimum(len, to_len); + const written = @min(len, to_len); @memcpy(to, input, written); // Hoping this gets auto vectorized @@ -1031,7 +1031,7 @@ pub const Encoder = struct { .latin1, JSC.Node.Encoding.ascii, JSC.Node.Encoding.ucs2, JSC.Node.Encoding.buffer, JSC.Node.Encoding.utf16le => { strings.copyU16IntoU8(to[0..to_len], []const u16, input[0..len]); - return @intCast(i64, @minimum(len, to_len)); + return @intCast(i64, @min(len, to_len)); }, JSC.Node.Encoding.hex => { diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 36ed271fd7aaf6..fb282912265f2e 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -351,7 +351,7 @@ pub const Response = struct { if (args.nextEat()) |init| { if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.body.init.status_code = @intCast(u16, @minimum(@maximum(0, init.toInt32()), std.math.maxInt(u16))); + response.body.init.status_code = @intCast(u16, @min(@maximum(0, init.toInt32()), std.math.maxInt(u16))); } else { if (Body.Init.init(getAllocator(globalThis), globalThis, init, init.jsType()) catch null) |_init| { response.body.init = _init; @@ -397,7 +397,7 @@ pub const Response = struct { if (args.nextEat()) |init| { if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.body.init.status_code = @intCast(u16, @minimum(@maximum(0, init.toInt32()), std.math.maxInt(u16))); + response.body.init.status_code = @intCast(u16, @min(@maximum(0, init.toInt32()), std.math.maxInt(u16))); } else { if (Body.Init.init(getAllocator(globalThis), globalThis, init, init.jsType()) catch null) |_init| { response.body.init = _init; @@ -2055,7 +2055,7 @@ pub const Blob = struct { onRead, &this.read_completion, this.opened_fd, - remaining[0..@minimum(remaining.len, this.max_length - this.read_off)], + remaining[0..@min(remaining.len, this.max_length - this.read_off)], this.offset + this.read_off, ); @@ -2204,7 +2204,7 @@ pub const Blob = struct { } if (stat.size > 0 and std.os.S.ISREG(stat.mode)) { - this.size = @minimum( + this.size = @min( @truncate(SizeType, @intCast(SizeType, @maximum(@intCast(i64, stat.size), 0))), this.max_length, ); @@ -2424,7 +2424,7 @@ pub const Blob = struct { var file_offset = this.file_blob.offset; const end = - @minimum(this.file_blob.size, remain.len); + @min(this.file_blob.size, remain.len); while (remain.len > 0 and total_written < end) { const wrote_len = this.doWrite(remain, file_offset) catch { @@ -2849,7 +2849,7 @@ pub const Blob = struct { } if (stat.size != 0) { - this.max_length = @maximum(@minimum(@intCast(SizeType, stat.size), this.max_length), this.offset) - this.offset; + this.max_length = @maximum(@min(@intCast(SizeType, stat.size), this.max_length), this.offset) - this.offset; if (this.max_length == 0) { this.doClose(); return; @@ -3165,7 +3165,7 @@ pub const Blob = struct { relativeStart = @intCast(i64, @maximum(start + @intCast(i64, this.size), 0)); } else { // Otherwise, let relativeStart be start. 
- relativeStart = @minimum(@intCast(i64, start), @intCast(i64, this.size)); + relativeStart = @min(@intCast(i64, start), @intCast(i64, this.size)); } } @@ -3177,7 +3177,7 @@ pub const Blob = struct { relativeEnd = @intCast(i64, @maximum(end + @intCast(i64, this.size), 0)); } else { // Otherwise, let relativeStart be start. - relativeEnd = @minimum(@intCast(i64, end), @intCast(i64, this.size)); + relativeEnd = @min(@intCast(i64, end), @intCast(i64, this.size)); } } @@ -3261,7 +3261,7 @@ pub const Blob = struct { const offset = this.offset; const store_size = store.size(); if (store_size != Blob.max_size) { - this.offset = @minimum(store_size, offset); + this.offset = @min(store_size, offset); this.size = store_size - offset; } } @@ -3439,7 +3439,7 @@ pub const Blob = struct { if (slice_.len == 0) return ""; slice_ = slice_[this.offset..]; - return slice_[0..@minimum(slice_.len, @as(usize, this.size))]; + return slice_[0..@min(slice_.len, @as(usize, this.size))]; } pub const Lifetime = JSC.WebCore.Lifetime; @@ -3471,7 +3471,7 @@ pub const Blob = struct { const bytes = result.buf; const is_temporary = result.is_temporary; if (blob.size > 0) - blob.size = @minimum(@truncate(u32, bytes.len), blob.size); + blob.size = @min(@truncate(u32, bytes.len), blob.size); if (!is_temporary) { promise.resolve(globalThis, Function(&blob, globalThis, bytes, comptime lifetime)); } else { diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 2d7a3badd01ac3..d0f196291aa931 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -1112,7 +1112,7 @@ pub const FileSink = struct { defer this.written = total; const fd = this.fd; var remain = this.buffer.slice(); - remain = remain[@minimum(this.head, remain.len)..]; + remain = remain[@min(this.head, remain.len)..]; const initial_remain = remain; defer { std.debug.assert(total - initial == @ptrToInt(remain.ptr) - @ptrToInt(initial_remain.ptr)); @@ -1126,7 +1126,7 @@ pub const FileSink = struct { } while (remain.len > 0) { const max_to_write = if (std.os.S.ISFIFO(this.mode)) max_fifo_size else remain.len; - const write_buf = remain[0..@minimum(remain.len, max_to_write)]; + const write_buf = remain[0..@min(remain.len, max_to_write)]; const res = JSC.Node.Syscall.write(fd, write_buf); if (res == .err) { const retry = @@ -2054,7 +2054,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { // do not write more than available // if we do, it will cause this to be delayed until the next call, each time - const to_write = @minimum(@truncate(Blob.SizeType, write_offset), @as(Blob.SizeType, this.buffer.len)); + const to_write = @min(@truncate(Blob.SizeType, write_offset), @as(Blob.SizeType, this.buffer.len)); // figure out how much data exactly to write const readable = this.readableSlice()[0..to_write]; @@ -2089,7 +2089,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { if (!this.done and !this.requested_end and !this.hasBackpressure()) { const pending = @truncate(Blob.SizeType, write_offset) -| to_write; const written_after_flush = this.wrote - initial_wrote; - const to_report = pending - @minimum(written_after_flush, pending); + const to_report = pending - @min(written_after_flush, pending); if ((written_after_flush == initial_wrote and pending == 0) or to_report > 0) { this.signal.ready(to_report, null); @@ -2709,7 +2709,7 @@ pub const ByteBlobLoader = struct { this.* = ByteBlobLoader{ .offset = blobe.offset, .store = blobe.store.?, - .chunk_size = if (user_chunk_size > 0) @minimum(user_chunk_size, blobe.size) 
else @minimum(1024 * 1024 * 2, blobe.size), + .chunk_size = if (user_chunk_size > 0) @min(user_chunk_size, blobe.size) else @min(1024 * 1024 * 2, blobe.size), .remain = blobe.size, .done = false, }; @@ -2729,7 +2729,7 @@ pub const ByteBlobLoader = struct { var temporary = this.store.sharedView(); temporary = temporary[this.offset..]; - temporary = temporary[0..@minimum(buffer.len, @minimum(temporary.len, this.remain))]; + temporary = temporary[0..@min(buffer.len, @min(temporary.len, this.remain))]; if (temporary.len == 0) { this.store.deref(); this.done = true; @@ -2816,7 +2816,7 @@ pub const ByteStream = struct { } if (this.has_received_last_chunk) { - return .{ .chunk_size = @truncate(Blob.SizeType, @minimum(1024 * 1024 * 2, this.buffer.items.len)) }; + return .{ .chunk_size = @truncate(Blob.SizeType, @min(1024 * 1024 * 2, this.buffer.items.len)) }; } if (this.highWaterMark == 0) { @@ -2874,7 +2874,7 @@ pub const ByteStream = struct { if (this.pending.state == .pending) { std.debug.assert(this.buffer.items.len == 0); - var to_copy = this.pending_buffer[0..@minimum(chunk.len, this.pending_buffer.len)]; + var to_copy = this.pending_buffer[0..@min(chunk.len, this.pending_buffer.len)]; const pending_buffer_len = this.pending_buffer.len; std.debug.assert(to_copy.ptr != chunk.ptr); @memcpy(to_copy.ptr, chunk.ptr, to_copy.len); @@ -2961,7 +2961,7 @@ pub const ByteStream = struct { if (this.buffer.items.len > 0) { std.debug.assert(this.value() == .zero); - const to_write = @minimum( + const to_write = @min( this.buffer.items.len - this.offset, buffer.len, ); @@ -3167,7 +3167,7 @@ pub const FileBlobLoader = struct { var remaining = this.buf[this.concurrent.read..]; while (remaining.len > 0) { - const to_read = @minimum(@as(usize, this.concurrent.chunk_size), remaining.len); + const to_read = @min(@as(usize, this.concurrent.chunk_size), remaining.len); switch (Syscall.read(this.fd, remaining[0..to_read])) { .err => |err| { const retry = std.os.E.AGAIN; @@ -3390,9 +3390,9 @@ pub const FileBlobLoader = struct { @as(usize, default_fifo_chunk_size); return if (file.max_size > 0) - if (available_to_read != std.math.maxInt(usize)) @minimum(chunk_size, available_to_read) else @minimum(@maximum(this.total_read, file.max_size) - this.total_read, chunk_size) + if (available_to_read != std.math.maxInt(usize)) @min(chunk_size, available_to_read) else @min(@maximum(this.total_read, file.max_size) - this.total_read, chunk_size) else - @minimum(available_to_read, chunk_size); + @min(available_to_read, chunk_size); } pub fn onPullInto(this: *FileBlobLoader, buffer: []u8, view: JSC.JSValue) StreamResult { @@ -3541,7 +3541,7 @@ pub const FileBlobLoader = struct { u8, @intCast( usize, - @minimum( + @min( len, 1024 * 1024 * 4, ), @@ -3663,7 +3663,7 @@ pub const FileBlobLoader = struct { if (this.buf.len == 0) { return; } else { - this.buf.len = @minimum(this.buf.len, available_to_read); + this.buf.len = @min(this.buf.len, available_to_read); } this.pending.result = this.read( diff --git a/src/cli/colon_list_type.zig b/src/cli/colon_list_type.zig index 9aa4aaa9685bf8..bd309e1032e9b9 100644 --- a/src/cli/colon_list_type.zig +++ b/src/cli/colon_list_type.zig @@ -26,7 +26,7 @@ pub fn ColonListType(comptime t: type, value_resolver: anytype) type { // Support either ":" or "=" as the separator, preferring whichever is first. 
// ":" is less confusing IMO because that syntax is used with flags // but "=" is what esbuild uses and I want this to be somewhat familiar for people using esbuild - const midpoint = @minimum(strings.indexOfChar(str, ':') orelse std.math.maxInt(u32), strings.indexOfChar(str, '=') orelse std.math.maxInt(u32)); + const midpoint = @min(strings.indexOfChar(str, ':') orelse std.math.maxInt(u32), strings.indexOfChar(str, '=') orelse std.math.maxInt(u32)); if (midpoint == std.math.maxInt(u32)) { return error.InvalidSeparator; } diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 9aa20875bbbaeb..b77f90fc52ef44 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -619,7 +619,7 @@ pub const UpgradeCommand = struct { Output.prettyErrorln( "error: The downloaded version of bun ({s}) doesn't match the expected version ({s}). Cancelled upgrade", .{ - version_string[0..@minimum(version_string.len, 512)], + version_string[0..@min(version_string.len, 512)], version_name, }, ); diff --git a/src/deps/picohttp.zig b/src/deps/picohttp.zig index 4a6848749bf608..33f305608628cd 100644 --- a/src/deps/picohttp.zig +++ b/src/deps/picohttp.zig @@ -185,7 +185,7 @@ pub const Response = struct { .minor_version = @intCast(usize, minor_version), .status_code = @intCast(usize, status_code), .status = status, - .headers = src[0..@minimum(num_headers, src.len)], + .headers = src[0..@min(num_headers, src.len)], .bytes_read = rc, }, }; diff --git a/src/deps/zig-clap/clap/comptime.zig b/src/deps/zig-clap/clap/comptime.zig index e3b3a8cdc5f2fb..f215991928d976 100644 --- a/src/deps/zig-clap/clap/comptime.zig +++ b/src/deps/zig-clap/clap/comptime.zig @@ -81,7 +81,7 @@ pub fn ComptimeClap( "TODO: implement stop_after_positional_at on windows", ); - var remaining_ = std.os.argv[@minimum(std.os.argv.len, stream.iter.args.inner.index)..]; + var remaining_ = std.os.argv[@min(std.os.argv.len, stream.iter.args.inner.index)..]; const first: []const u8 = if (remaining_.len > 0) bun.span(remaining_[0]) else ""; if (first.len > 0 and std.mem.eql(u8, first, "--")) { remaining_ = remaining_[1..]; diff --git a/src/env_loader.zig b/src/env_loader.zig index ca8da065218d98..4a37a586fa868c 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -164,7 +164,7 @@ pub const Lexer = struct { -1 => { lexer.end = lexer.current; - return lexer.source.contents[start..if (any_spaces) @minimum(last_non_space, lexer.source.contents.len) else lexer.source.contents.len]; + return lexer.source.contents[start..if (any_spaces) @min(last_non_space, lexer.source.contents.len) else lexer.source.contents.len]; }, '$' => { lexer.has_nested_value = true; @@ -182,13 +182,13 @@ pub const Lexer = struct { '\'' => { lexer.end = lexer.current; lexer.step(); - return lexer.source.contents[start..@minimum(lexer.end, lexer.source.contents.len)]; + return lexer.source.contents[start..@min(lexer.end, lexer.source.contents.len)]; }, implicitQuoteCharacter => { lexer.end = lexer.current; lexer.step(); - return lexer.source.contents[start..@minimum(if (any_spaces) last_non_space + 1 else lexer.end, lexer.end)]; + return lexer.source.contents[start..@min(if (any_spaces) last_non_space + 1 else lexer.end, lexer.end)]; }, '"' => { // We keep going @@ -201,7 +201,7 @@ pub const Lexer = struct { lexer.step(); lexer.was_quoted = was_quoted; - return lexer.source.contents[start..@minimum( + return lexer.source.contents[start..@min( lexer.end, lexer.source.contents.len, )]; @@ -293,7 +293,7 @@ pub const Lexer = struct { 0, -1 => { 
this.end = this.current; return if (last_non_space > this.start) - Variable{ .key = this.source.contents[this.start..@minimum(last_non_space + 1, this.source.contents.len)], .value = "" } + Variable{ .key = this.source.contents[this.start..@min(last_non_space + 1, this.source.contents.len)], .value = "" } else null; }, diff --git a/src/http.zig b/src/http.zig index fedb30a6560da1..a56594b9cb7169 100644 --- a/src/http.zig +++ b/src/http.zig @@ -2004,7 +2004,7 @@ pub const RequestContext = struct { @memset( handler.message_buffer.list.items.ptr, 0, - @minimum(handler.message_buffer.list.items.len, 128), + @min(handler.message_buffer.list.items.len, 128), ); } const build_result = handler.builder.build(request_id, cmd.timestamp, arena.allocator()) catch |err| { @@ -2683,11 +2683,11 @@ pub const RequestContext = struct { // This makes it Just Work if you pass a line/column number if (strings.indexOfChar(id, ':')) |colon| { - line = id[@minimum(id.len, colon + 1)..]; + line = id[@min(id.len, colon + 1)..]; id = id[0..colon]; if (strings.indexOfChar(line, ':')) |col| { - column = line[@minimum(line.len, col + 1)..]; + column = line[@min(line.len, col + 1)..]; line = line[0..col]; } } @@ -3444,7 +3444,7 @@ pub const Server = struct { defer listener.deinit(); server.websocket_threadpool.stack_size = @truncate( u32, - @minimum( + @min( @maximum(128_000, Fs.FileSystem.RealFS.Limit.stack), 4_000_000, ), diff --git a/src/http/url_path.zig b/src/http/url_path.zig index 973e8087813669..58bc060757c465 100644 --- a/src/http/url_path.zig +++ b/src/http/url_path.zig @@ -113,7 +113,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath { last_slash = @maximum(last_slash, i); if (i > 0) { - first_segment_end = @minimum(first_segment_end, i); + first_segment_end = @min(first_segment_end, i); } }, else => {}, @@ -140,7 +140,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath { var path = if (question_mark_i < 0) decoded_pathname[1..] 
else decoded_pathname[1..@intCast(usize, question_mark_i)]; - const first_segment = decoded_pathname[1..@minimum(@intCast(usize, first_segment_end), decoded_pathname.len)]; + const first_segment = decoded_pathname[1..@min(@intCast(usize, first_segment_end), decoded_pathname.len)]; const is_source_map = strings.eqlComptime(extname, "map"); var backup_extname: string = extname; if (is_source_map and path.len > ".map".len) { diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index 66592ea348bcbb..1f2f7f5d6a783b 100644 --- a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -312,7 +312,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { } } - const to_write = remain[0..@minimum(remain.len, data.len)]; + const to_write = remain[0..@min(remain.len, data.len)]; if (data.len > 0 and to_write.len > 0) { @memcpy(remain.ptr, data.ptr, to_write.len); this.body_written += to_write.len; @@ -412,17 +412,17 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { // return; // } - if (@minimum(upgrade_header.name.len, upgrade_header.value.len) == 0) { + if (@min(upgrade_header.name.len, upgrade_header.value.len) == 0) { this.terminate(ErrorCode.missing_upgrade_header); return; } - if (@minimum(connection_header.name.len, connection_header.value.len) == 0) { + if (@min(connection_header.name.len, connection_header.value.len) == 0) { this.terminate(ErrorCode.missing_connection_header); return; } - if (@minimum(websocket_accept_header.name.len, websocket_accept_header.value.len) == 0) { + if (@min(websocket_accept_header.name.len, websocket_accept_header.value.len) == 0) { this.terminate(ErrorCode.missing_websocket_accept_header); return; } @@ -475,7 +475,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { this.terminate(ErrorCode.failed_to_write); return; } - this.to_send = this.to_send[@minimum(@intCast(usize, wrote), this.to_send.len)..]; + this.to_send = this.to_send[@min(@intCast(usize, wrote), this.to_send.len)..]; } pub fn handleTimeout( this: *HTTPClient, @@ -1087,7 +1087,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { }, .ping => { - const ping_len = @minimum(data.len, @minimum(receive_body_remain, 125)); + const ping_len = @min(data.len, @min(receive_body_remain, 125)); this.ping_len = @truncate(u8, ping_len); if (ping_len > 0) { @@ -1102,7 +1102,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { if (data.len == 0) break; }, .pong => { - const pong_len = @minimum(data.len, @minimum(receive_body_remain, this.ping_frame_bytes.len)); + const pong_len = @min(data.len, @min(receive_body_remain, this.ping_frame_bytes.len)); data = data[pong_len..]; receive_state = .need_header; receiving_type = last_receive_data_type; @@ -1116,7 +1116,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { } if (data.len == 0) return; - const to_consume = @minimum(receive_body_remain, data.len); + const to_consume = @min(receive_body_remain, data.len); const consumed = this.consume(data[0..to_consume], receive_body_remain, last_receive_data_type, is_final); if (consumed == 0 and last_receive_data_type == .Text) { diff --git a/src/http_client_async.zig b/src/http_client_async.zig index 114181c6036470..2d85607e8bcb3c 100644 --- a/src/http_client_async.zig +++ b/src/http_client_async.zig @@ -794,7 +794,7 @@ pub fn hashHeaderName(name: string) u64 { var buf_slice: []u8 = std.mem.span(&buf); while (remain.len > 0) { - const end = @minimum(hasher.buf.len, remain.len); + const end = @min(hasher.buf.len, remain.len); 
hasher.update(strings.copyLowercase(std.mem.span(remain[0..end]), buf_slice)); remain = remain[end..]; @@ -1267,7 +1267,7 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s std.debug.assert(list.items.len == writer.context.items.len); if (this.state.request_body.len > 0 and list.capacity - list.items.len > 0) { var remain = list.items.ptr[list.items.len..list.capacity]; - const wrote = @minimum(remain.len, this.state.request_body.len); + const wrote = @min(remain.len, this.state.request_body.len); std.debug.assert(wrote > 0); @memcpy(remain.ptr, this.state.request_body.ptr, wrote); list.items.len += wrote; @@ -1364,7 +1364,7 @@ pub fn onData(this: *HTTPClient, comptime is_ssl: bool, incoming_data: []const u return; } - const to_read_len = @minimum(available.len, to_read.len); + const to_read_len = @min(available.len, to_read.len); req_msg.data.appendSliceAssumeCapacity(to_read[0..to_read_len]); to_read = req_msg.data.slice(); pending_buffers[1] = incoming_data[to_read_len..]; @@ -1407,7 +1407,7 @@ pub fn onData(this: *HTTPClient, comptime is_ssl: bool, incoming_data: []const u this.state.pending_response = response; - pending_buffers[0] = to_read[@minimum(@intCast(usize, response.bytes_read), to_read.len)..]; + pending_buffers[0] = to_read[@min(@intCast(usize, response.bytes_read), to_read.len)..]; if (pending_buffers[0].len == 0 and pending_buffers[1].len > 0) { pending_buffers[0] = pending_buffers[1]; pending_buffers[1] = ""; @@ -1721,7 +1721,7 @@ pub fn handleResponseBody(this: *HTTPClient, incoming_data: []const u8) !bool { } const remaining_content_length = this.state.body_size - buffer.list.items.len; - var remainder = incoming_data[0..@minimum(incoming_data.len, remaining_content_length)]; + var remainder = incoming_data[0..@min(incoming_data.len, remaining_content_length)]; _ = try buffer.write(remainder); diff --git a/src/install/dependency.zig b/src/install/dependency.zig index e97cc92f9682d0..50aef77e6eea7b 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -332,29 +332,29 @@ pub const Version = struct { // git://, git@, git+ssh 'g' => { if (strings.eqlComptime( - dependency[0..@minimum("git://".len, dependency.len)], + dependency[0..@min("git://".len, dependency.len)], "git://", ) or strings.eqlComptime( - dependency[0..@minimum("git@".len, dependency.len)], + dependency[0..@min("git@".len, dependency.len)], "git@", ) or strings.eqlComptime( - dependency[0..@minimum("git+ssh".len, dependency.len)], + dependency[0..@min("git+ssh".len, dependency.len)], "git+ssh", ) or strings.eqlComptime( - dependency[0..@minimum("git+file".len, dependency.len)], + dependency[0..@min("git+file".len, dependency.len)], "git+file", ) or strings.eqlComptime( - dependency[0..@minimum("git+http".len, dependency.len)], + dependency[0..@min("git+http".len, dependency.len)], "git+http", ) or strings.eqlComptime( - dependency[0..@minimum("git+https".len, dependency.len)], + dependency[0..@min("git+https".len, dependency.len)], "git+https", )) { return .git; } if (strings.eqlComptime( - dependency[0..@minimum("github".len, dependency.len)], + dependency[0..@min("github".len, dependency.len)], "github", ) or isGitHubRepoPath(dependency)) { return .github; @@ -379,21 +379,21 @@ pub const Version = struct { var remainder = dependency; if (strings.eqlComptime( - remainder[0..@minimum("https://".len, remainder.len)], + remainder[0..@min("https://".len, remainder.len)], "https://", )) { remainder = remainder["https://".len..]; } if (strings.eqlComptime( - 
remainder[0..@minimum("http://".len, remainder.len)], + remainder[0..@min("http://".len, remainder.len)], "http://", )) { remainder = remainder["http://".len..]; } if (strings.eqlComptime( - remainder[0..@minimum("github".len, remainder.len)], + remainder[0..@min("github".len, remainder.len)], "github", ) or isGitHubRepoPath(remainder)) { return .github; @@ -423,7 +423,7 @@ pub const Version = struct { return .tarball; if (strings.eqlComptime( - dependency[0..@minimum("file:".len, dependency.len)], + dependency[0..@min("file:".len, dependency.len)], "file:", )) { return .folder; @@ -442,7 +442,7 @@ pub const Version = struct { return .tarball; if (strings.eqlComptime( - dependency[0..@minimum("link:".len, dependency.len)], + dependency[0..@min("link:".len, dependency.len)], "link:", )) { return .symlink; @@ -458,7 +458,7 @@ pub const Version = struct { // workspace:// 'w' => { if (strings.eqlComptime( - dependency[0..@minimum("workspace://".len, dependency.len)], + dependency[0..@min("workspace://".len, dependency.len)], "workspace://", )) { return .workspace; diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index c74c0fb296395e..5a790beadce463 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -156,7 +156,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string { } } - var tmpname = try FileSystem.instance.tmpname(basename[0..@minimum(basename.len, 32)], &tmpname_buf, tgz_bytes.len); + var tmpname = try FileSystem.instance.tmpname(basename[0..@min(basename.len, 32)], &tmpname_buf, tgz_bytes.len); { var extract_destination = tmpdir.makeOpenPath(std.mem.span(tmpname), .{ .iterate = true }) catch |err| { Output.panic("err: {s} when create temporary directory named {s} (while extracting {s})", .{ @errorName(err), tmpname, name }); diff --git a/src/install/install.zig b/src/install/install.zig index bac5835cf2d540..d13773a5adafe8 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -112,11 +112,11 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type pub inline fn get(this: Slice, in: []const Type) []const Type { // it should be impossible to address this out of bounds due to the minimum here - return in.ptr[this.off..@minimum(in.len, this.off + this.len)]; + return in.ptr[this.off..@min(in.len, this.off + this.len)]; } pub inline fn mut(this: Slice, in: []Type) []Type { - return in.ptr[this.off..@minimum(in.len, this.off + this.len)]; + return in.ptr[this.off..@min(in.len, this.off + this.len)]; } pub fn init(buf: []const Type, in: []const Type) Slice { @@ -157,7 +157,7 @@ pub const Aligner = struct { pub fn write(comptime Type: type, comptime Writer: type, writer: Writer, pos: usize) !usize { const to_write = skipAmount(Type, pos); - var remainder: string = alignment_bytes_to_repeat_buffer[0..@minimum(to_write, alignment_bytes_to_repeat_buffer.len)]; + var remainder: string = alignment_bytes_to_repeat_buffer[0..@min(to_write, alignment_bytes_to_repeat_buffer.len)]; try writer.writeAll(remainder); return to_write; @@ -810,7 +810,7 @@ const PackageInstall = struct { // Heuristic: most package.jsons will be less than 2048 bytes. 
read = package_json_file.read(mutable.list.items[total..]) catch return false; - var remain = mutable.list.items[@minimum(total, read)..]; + var remain = mutable.list.items[@min(total, read)..]; if (read > 0 and remain.len < 1024) { mutable.growBy(4096) catch return false; mutable.list.expandToCapacity(); @@ -1875,7 +1875,7 @@ pub const PackageManager = struct { var available = buf[spanned.len..]; var end: []u8 = undefined; if (scope.url.hostname.len > 32 or available.len < 64) { - const visible_hostname = scope.url.hostname[0..@minimum(scope.url.hostname.len, 12)]; + const visible_hostname = scope.url.hostname[0..@min(scope.url.hostname.len, 12)]; end = std.fmt.bufPrint(available, "@@{s}__{x}", .{ visible_hostname, String.Builder.stringHash(scope.url.href) }) catch unreachable; } else { end = std.fmt.bufPrint(available, "@@{s}", .{scope.url.hostname}) catch unreachable; @@ -3779,7 +3779,7 @@ pub const PackageManager = struct { if (env_loader.map.get("BUN_CONFIG_HTTP_RETRY_COUNT")) |retry_count| { if (std.fmt.parseInt(i32, retry_count, 10)) |int| { - this.max_retry_count = @intCast(u16, @minimum(@maximum(int, 0), 65355)); + this.max_retry_count = @intCast(u16, @min(@maximum(int, 0), 65355)); } else |_| {} } @@ -4278,7 +4278,7 @@ pub const PackageManager = struct { if (env_loader.map.get("GOMAXPROCS")) |max_procs| { if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count_| { - cpu_count = @minimum(cpu_count, cpu_count_); + cpu_count = @min(cpu_count, cpu_count_); } else |_| {} } @@ -4350,7 +4350,7 @@ pub const PackageManager = struct { if (env_loader.map.get("GOMAXPROCS")) |max_procs| { if (std.fmt.parseInt(u32, max_procs, 10)) |cpu_count_| { - cpu_count = @minimum(cpu_count, cpu_count_); + cpu_count = @min(cpu_count, cpu_count_); } else |_| {} } diff --git a/src/install/integrity.zig b/src/install/integrity.zig index 3d3ab41c4ae707..bd4981b7428889 100644 --- a/src/install/integrity.zig +++ b/src/install/integrity.zig @@ -36,7 +36,7 @@ pub const Integrity = extern struct { // e.g. 
"3cd0599b099384b815c10f7fa7df0092b62d534f" var integrity = Integrity{ .tag = Tag.sha1 }; - const end: usize = @minimum("3cd0599b099384b815c10f7fa7df0092b62d534f".len, buf.len); + const end: usize = @min("3cd0599b099384b815c10f7fa7df0092b62d534f".len, buf.len); var out_i: usize = 0; var i: usize = 0; @@ -117,7 +117,7 @@ pub const Integrity = extern struct { pub fn parse(buf: []const u8) Tag { const Matcher = strings.ExactSizeMatcher(8); - const i = std.mem.indexOfScalar(u8, buf[0..@minimum(buf.len, 7)], '-') orelse return Tag.unknown; + const i = std.mem.indexOfScalar(u8, buf[0..@min(buf.len, 7)], '-') orelse return Tag.unknown; return switch (Matcher.match(buf[0..i])) { Matcher.case("sha1") => Tag.sha1, diff --git a/src/io/io_darwin.zig b/src/io/io_darwin.zig index a551f935b57a0e..104da050785dbb 100644 --- a/src/io/io_darwin.zig +++ b/src/io/io_darwin.zig @@ -859,7 +859,7 @@ fn flush_timeouts(self: *IO) ?u64 { const timeout_ns = expires - now; if (min_timeout) |min_ns| { - min_timeout = @minimum(min_ns, timeout_ns); + min_timeout = @min(min_ns, timeout_ns); } else { min_timeout = timeout_ns; } @@ -1604,7 +1604,7 @@ fn buffer_limit(buffer_len: usize) usize { .macos, .ios, .watchos, .tvos => std.math.maxInt(i32), else => std.math.maxInt(isize), }; - return @minimum(limit, buffer_len); + return @min(limit, buffer_len); } pub var global: IO = undefined; diff --git a/src/io/io_linux.zig b/src/io/io_linux.zig index 323dd03f945c99..374ce4a5560657 100644 --- a/src/io/io_linux.zig +++ b/src/io/io_linux.zig @@ -522,7 +522,7 @@ pub fn init(entries_: u12, flags: u32, waker: Waker) !IO { } if (limit.cur < 128 * 1024) { - entries = @minimum(256, entries); + entries = @min(256, entries); } } @@ -1734,5 +1734,5 @@ fn buffer_limit(buffer_len: usize) usize { .macos, .ios, .watchos, .tvos => std.math.maxInt(i32), else => std.math.maxInt(isize), }; - return @minimum(limit, buffer_len); + return @min(limit, buffer_len); } diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 1344974212afa6..3a5cd39993a7bb 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -735,7 +735,7 @@ fn NewLexer_( // Reset string literal const base = if (comptime quote == 0) lexer.start else lexer.start + 1; - lexer.string_literal_slice = lexer.source.contents[base..@minimum(lexer.source.contents.len, lexer.end - @as(usize, string_literal_details.suffix_len))]; + lexer.string_literal_slice = lexer.source.contents[base..@min(lexer.source.contents.len, lexer.end - @as(usize, string_literal_details.suffix_len))]; lexer.string_literal_is_ascii = !string_literal_details.needs_slow_path; lexer.string_literal_buffer.shrinkRetainingCapacity(0); if (string_literal_details.needs_slow_path) { diff --git a/src/js_parser.zig b/src/js_parser.zig index d88a9cc1a591f6..d7c897bce0941d 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -4363,7 +4363,7 @@ fn NewParser_( for (parts_) |part, i| { if (part.tag == .none) { stmts_count += part.stmts.len; - first_none_part = @minimum(i, first_none_part); + first_none_part = @min(i, first_none_part); } } @@ -17151,7 +17151,7 @@ fn NewParser_( { var array = expr.data.e_array; - array.items.len = @minimum(array.items.len, @truncate(u32, bound_array.items.len)); + array.items.len = @min(array.items.len, @truncate(u32, bound_array.items.len)); var slice = array.items.slice(); for (bound_array.items[0..array.items.len]) |item, item_i| { const child_expr = slice[item_i]; diff --git a/src/js_printer.zig b/src/js_printer.zig index 79faa989b98342..f5614d5f5c15f9 100644 --- a/src/js_printer.zig +++ 
b/src/js_printer.zig @@ -577,7 +577,7 @@ pub fn NewPrinter( var remaining: usize = n; while (remaining > 0) { - const to_write = @minimum(remaining, bytes.len); + const to_write = @min(remaining, bytes.len); try self.writeAll(bytes[0..to_write]); remaining -= to_write; } diff --git a/src/json_parser.zig b/src/json_parser.zig index 79c07257eb4c16..f690344205d835 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -467,7 +467,7 @@ pub const PackageJSONVersionChecker = struct { // first one wins if (key.data == .e_string and value.data == .e_string) { if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) { - const len = @minimum( + const len = @min( value.data.e_string.data.len, p.found_name_buf.len, ); @@ -476,7 +476,7 @@ pub const PackageJSONVersionChecker = struct { p.found_name = p.found_name_buf[0..len]; p.has_found_name = true; } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { - const len = @minimum( + const len = @min( value.data.e_string.data.len, p.found_version_buf.len, ); diff --git a/src/linker.zig b/src/linker.zig index 9855dc82c668d0..7807d5cd6062eb 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -369,7 +369,7 @@ pub const Linker = struct { } if (package_name.len != text.len) { if (node_modules_bundle.getPackage(package_name)) |pkg| { - const import_path = text[@minimum(text.len, package_name.len + 1)..]; + const import_path = text[@min(text.len, package_name.len + 1)..]; if (node_modules_bundle.findModuleIDInPackageIgnoringExtension(pkg, import_path)) |found_module| { import_record.is_bundled = true; node_module_bundle_import_path = node_module_bundle_import_path orelse @@ -397,7 +397,7 @@ pub const Linker = struct { const package_name = runtime[0 .. strings.indexOfChar(runtime, '/') orelse runtime.len]; if (node_modules_bundle.getPackage(package_name)) |pkg| { - const import_path = runtime[@minimum(runtime.len, package_name.len + 1)..]; + const import_path = runtime[@min(runtime.len, package_name.len + 1)..]; if (node_modules_bundle.findModuleInPackage(pkg, import_path)) |found_module| { import_record.is_bundled = true; node_module_bundle_import_path = node_module_bundle_import_path orelse diff --git a/src/logger.zig b/src/logger.zig index df3095e23df7d3..75309c84326479 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -100,7 +100,7 @@ pub const Location = struct { pub fn count(this: Location, builder: *StringBuilder) void { builder.count(this.file); builder.count(this.namespace); - if (this.line_text) |text| builder.count(text[0..@minimum(text.len, 690)]); + if (this.line_text) |text| builder.count(text[0..@min(text.len, 690)]); if (this.suggestion) |text| builder.count(text); } @@ -556,7 +556,7 @@ pub const Range = packed struct { pub fn in(this: Range, buf: []const u8) []const u8 { if (this.loc.start < 0 or this.len <= 0) return ""; const slice = buf[@intCast(usize, this.loc.start)..]; - return slice[0..@minimum(@intCast(usize, this.len), buf.len)]; + return slice[0..@min(@intCast(usize, this.len), buf.len)]; } pub fn isEmpty(r: *const Range) bool { @@ -656,7 +656,7 @@ pub const Log = struct { const msgs: []const Msg = this.msgs.items; var errors_stack: [256]*anyopaque = undefined; - const count = @intCast(u16, @minimum(msgs.len, errors_stack.len)); + const count = @intCast(u16, @min(msgs.len, errors_stack.len)); switch (count) { 0 => return JSC.JSValue.jsUndefined(), 1 => { diff --git a/src/mdx/mdx_parser.zig b/src/mdx/mdx_parser.zig index 4261c0504d0f5f..c46b43f8f37dea 100644 --- 
a/src/mdx/mdx_parser.zig
+++ b/src/mdx/mdx_parser.zig
@@ -98,48 +98,48 @@ pub const Block = struct {
}
pub const Tag = enum {
- /// <body>...</body>
+ /// <body>...</body>
doc,
- ///<blockquote>...</blockquote>
+ ///<blockquote>...</blockquote>
quote,
///<ul>...</ul>
- ///Detail: Structure ul_detail.
+ ///Detail: Structure ul_detail.
ul,
///<ol>...</ol>
- ///Detail: Structure ol_detail.
+ ///Detail: Structure ol_detail.
ol,
///<li>...</li>
- ///Detail: Structure li_detail.
+ ///Detail: Structure li_detail.
li,
- ///<hr>
+ ///<hr>
hr,
///<h1>...</h6> (for levels up to 6)
- ///Detail: Structure h_detail.
+ ///Detail: Structure h_detail.
h,
///<pre><code>...</code></pre>
///Note the text lines within code blocks are terminated with '\n'
- ///instead of explicit MD_TEXT_BR.
+ ///instead of explicit MD_TEXT_BR.
code,
/// Raw HTML block. This itself does not correspond to any particular HTML
///tag. The contents of it _is_ raw HTML source intended to be put
- ///in verbatim form to the HTML output.
+ ///in verbatim form to the HTML output.
html,
- ///<p>...</p>
+ ///<p>...</p>
p,
/// <table>...</table> and its contents.
///Detail: Structure table_detail (for table),
/// structure td_detail (for th and td)
- ///Note all of these are used only if extension MD_FLAG_TABLES is enabled.
+ ///Note all of these are used only if extension MD_FLAG_TABLES is enabled.
table,
thead,
tbody,
@@ -164,7 +164,7 @@ pub const Block = struct {
task: bool = false,
/// is_task, then one of 'x', 'X' or ' '. Undefined otherwise.
task_mark: u8 = 'x',
- /// If is_task, then offset in the input of the char between '[' and ']'.
+ /// If is_task, then offset in the input of the char between '[' and ']'.
task_mark_off: u32 = 0,
};
@@ -178,11 +178,11 @@ pub const Block = struct {
};
pub const Table = struct {
- /// Count of columns in the table.
+ /// Count of columns in the table.
column_count: u32 = 0,
- /// Count of rows in the table header (currently always 1)
+ /// Count of rows in the table header (currently always 1)
head_row_count: u32 = 1,
- /// Count of rows in the table body
+ /// Count of rows in the table body
body_row_count: u32 = 0,
};
@@ -250,20 +250,20 @@ pub const Span = struct {
};
pub const Text = enum {
- /// Normal text.
+ /// Normal text.
normal,
/// NULL character. CommonMark requires replacing NULL character with
- /// the replacement char U+FFFD, so this allows caller to do that easily.
+ /// the replacement char U+FFFD, so this allows caller to do that easily.
nullchar,
/// Line breaks.
/// Note these are not sent from blocks with verbatim output (MD_BLOCK_CODE
- /// or MD_BLOCK_HTML). In such cases, '\n' is part of the text itself.
+ /// or MD_BLOCK_HTML). In such cases, '\n' is part of the text itself.
- ///<br> (hard break)
+ ///<br> (hard break)
br,
- /// '\n' in source text where it is not semantically meaningful (soft break)
+ /// '\n' in source text where it is not semantically meaningful (soft break)
softbr,
/// Entity.
- /// (a) Named entity, e.g. &nbsp;
+ /// (a) Named entity, e.g. &nbsp;
/// (Note MD4C does not have a list of known entities.
/// Anything matching the regexp /&[A-Za-z][A-Za-z0-9]{1,47};/ is
/// treated as a named entity.)
@@ -271,19 +271,19 @@ pub const Text = enum {
/// (c) Hexadecimal entity, e.g. &#x1234;
///
/// As MD4C is mostly encoding agnostic, application gets the verbatim
- /// entity text into the MD_PARSER::text_callback().
+ /// entity text into the MD_PARSER::text_callback().
entity,
/// Text in a code block (inside MD_BLOCK_CODE) or inlined code (`code`).
/// If it is inside MD_BLOCK_CODE, it includes spaces for indentation and
/// '\n' for new lines. br and softbr are not sent for this
- /// kind of text.
+ /// kind of text.
code,
/// Text is a raw HTML. If it is contents of a raw HTML block (i.e. not
/// an inline raw HTML), then br and softbr are not used.
- /// The text contains verbatim '\n' for the new lines.
+ /// The text contains verbatim '\n' for the new lines.
html,
/// Text is inside an equation. This is processed the same way as inlined code
- /// spans (`code`).
+ /// spans (`code`).
latexmath,
};
pub const Align = enum(u3) {
@@ -339,15 +339,15 @@ pub const Mark = struct {
ch: u8 = 0,
flags: u16 = 0,
- /// Maybe closer.
+ /// Maybe closer.
pub const potential_closer = 0x02;
- /// Maybe opener.
+ /// Maybe opener.
pub const potential_opener = 0x01;
- /// Definitely opener.
+ /// Definitely opener.
pub const opener = 0x04;
- /// Definitely closer.
+ /// Definitely closer.
pub const closer = 0x08;
- /// Resolved in any definite way.
+ /// Resolved in any definite way.
pub const resolved = 0x10;
/// Helper for the "rule of 3". */
@@ -476,33 +476,33 @@ pub const MDParser = struct {
last_list_item_starts_with_two_blank_lines: bool = false,
pub const Flags = enum {
- /// In MD_TEXT_NORMAL, collapse non-trivial whitespace into single ' '
+ /// In MD_TEXT_NORMAL, collapse non-trivial whitespace into single ' '
collapse_whitespace,
- /// Do not require space in ATX headers ( ###header )
+ /// Do not require space in ATX headers ( ###header )
permissive_atxheaders,
- /// Recognize URLs as autolinks even without '<', '>'
+ /// Recognize URLs as autolinks even without '<', '>'
permissive_url_autolinks,
- /// Recognize e-mails as autolinks even without '<', '>' and 'mailto:'
+ /// Recognize e-mails as autolinks even without '<', '>' and 'mailto:'
permissive_email_autolinks,
- /// Disable indented code blocks. (Only fenced code works.)
+ /// Disable indented code blocks. (Only fenced code works.)
noindented_codeblocks,
- /// Disable raw HTML blocks.
+ /// Disable raw HTML blocks.
no_html_blocks,
- /// Disable raw HTML (inline).
+ /// Disable raw HTML (inline).
no_html_spans,
- /// Enable tables extension.
+ /// Enable tables extension.
tables,
- /// Enable strikethrough extension.
+ /// Enable strikethrough extension.
strikethrough,
- /// Enable WWW autolinks (even without any scheme prefix, if they begin with 'www.')
+ /// Enable WWW autolinks (even without any scheme prefix, if they begin with 'www.')
permissive_www_autolinks,
/// Enable task list extension.
tasklists,
- /// Enable $ and $$ containing LaTeX equations.
+ /// Enable $ and $$ containing LaTeX equations.
latex_mathspans,
- /// Enable wiki links extension.
+ /// Enable wiki links extension.
wikilinks, - /// Enable underline extension (and disables '_' for normal emphasis). + /// Enable underline extension (and disables '_' for normal emphasis). underline, pub const Set = std.enums.EnumSet(Flags); @@ -684,7 +684,7 @@ pub const MDParser = struct { } // Check for ordered list item marks - max_end = @minimum(off + 9, this.size); + max_end = @min(off + 9, this.size); container.start = 0; while (off < max_end and std.ascii.isDigit(this.charAt(off))) { container.start = container.start * 10 + (this.charAt(off) - '0'); diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 3beaa03d700b5b..dc685ffe1644e1 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -409,7 +409,7 @@ pub export fn napi_get_value_string_latin1(env: napi_env, value: napi_value, buf result.* = @intCast(@TypeOf(result.*), wrote); return .ok; } - const to_copy = @minimum(zig_str.len, buf_.len); + const to_copy = @min(zig_str.len, buf_.len); @memcpy(buf, zig_str.slice().ptr, to_copy); buf[to_copy] = 0; // if zero terminated, report the length of the string without the null @@ -476,7 +476,7 @@ pub export fn napi_get_value_string_utf8(env: napi_env, value: napi_value, buf_p return .ok; } - const to_copy = @minimum(zig_str.len, buf_.len); + const to_copy = @min(zig_str.len, buf_.len); @memcpy(buf, zig_str.slice().ptr, to_copy); buf[to_copy] = 0; if (result_ptr) |result| { @@ -531,7 +531,7 @@ pub export fn napi_get_value_string_utf16(env: napi_env, value: napi_value, buf_ return .ok; } - const to_copy = @minimum(zig_str.len, buf_.len) * 2; + const to_copy = @min(zig_str.len, buf_.len) * 2; @memcpy(std.mem.sliceAsBytes(buf_).ptr, std.mem.sliceAsBytes(zig_str.utf16SliceAligned()).ptr, to_copy); buf[to_copy] = 0; // if zero terminated, report the length of the string without the null @@ -706,7 +706,7 @@ pub export fn napi_is_arraybuffer(_: napi_env, value: napi_value, result: *bool) pub export fn napi_create_arraybuffer(env: napi_env, byte_length: usize, data: [*]const u8, result: *napi_value) napi_status { var typed_array = JSC.C.JSObjectMakeTypedArray(env.ref(), .kJSTypedArrayTypeArrayBuffer, byte_length, TODO_EXCEPTION); var array_buffer = JSValue.c(typed_array).asArrayBuffer(env) orelse return .generic_failure; - @memcpy(array_buffer.ptr, data, @minimum(array_buffer.len, @truncate(u32, byte_length))); + @memcpy(array_buffer.ptr, data, @min(array_buffer.len, @truncate(u32, byte_length))); result.* = JSValue.c(typed_array); return .ok; } diff --git a/src/report.zig b/src/report.zig index 8037c2899affd8..79e60f5be49085 100644 --- a/src/report.zig +++ b/src/report.zig @@ -192,7 +192,7 @@ pub fn fatal(err_: ?anyerror, msg_: ?string) void { if (msg_) |msg| { const msg_ptr = @ptrToInt(msg.ptr); if (msg_ptr > 0) { - const len = @maximum(@minimum(msg.len, 1024), 0); + const len = @maximum(@min(msg.len, 1024), 0); if (len > 0) { if (Output.isEmojiEnabled()) { @@ -349,7 +349,7 @@ pub noinline fn globalError(err: anyerror) noreturn { \\Current limit: {d} \\ \\To fix this, try running: - \\ + \\ \\ sudo launchctl limit maxfiles 2147483646 \\ ulimit -n 2147483646 \\ @@ -368,7 +368,7 @@ pub noinline fn globalError(err: anyerror) noreturn { \\Current limit: {d} \\ \\To fix this, try running: - \\ + \\ \\ sudo echo -e "\nfs.file-max=2147483646\n" >> /etc/sysctl.conf \\ sudo sysctl -p \\ ulimit -n 2147483646 @@ -410,7 +410,7 @@ pub noinline fn globalError(err: anyerror) noreturn { \\Current limit: {d} \\ \\To fix this, try running: - \\ + \\ \\ ulimit -n 2147483646 \\ \\You may also need to run: @@ -430,7 +430,7 @@ pub noinline fn 
globalError(err: anyerror) noreturn { \\Current limit: {d} \\ \\To fix this, try running: - \\ + \\ \\ ulimit -n 2147483646 \\ \\That will only work for the current shell. To fix this for the entire system, run: @@ -474,7 +474,7 @@ pub noinline fn globalError(err: anyerror) noreturn { \\Current limit: {d} \\ \\To fix this, try running: - \\ + \\ \\ ulimit -n 2147483646 \\ , diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index 5377df5568df62..9c39703f1f05e5 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -1320,7 +1320,7 @@ pub const ESModule = struct { } package.name = specifier[0 .. at + offset]; - parseSubpath(&package.subpath, specifier[@minimum(package.name.len + package.version.len + 1, specifier.len)..], subpath_buf); + parseSubpath(&package.subpath, specifier[@min(package.name.len + package.version.len + 1, specifier.len)..], subpath_buf); } else { parseSubpath(&package.subpath, specifier[package.name.len..], subpath_buf); } diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index 40e0a778808c6e..00c0c10ad6c293 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -40,7 +40,7 @@ inline fn @"is ../"(slice: []const u8) bool { pub fn longestCommonPathGeneric(input: []const []const u8, comptime separator: u8, comptime isPathSeparator: IsSeparatorFunc) []const u8 { var min_length: usize = std.math.maxInt(usize); for (input) |str| { - min_length = @minimum(str.len, min_length); + min_length = @min(str.len, min_length); } var index: usize = 0; @@ -192,7 +192,7 @@ pub fn relativeToCommonPath( const common_path = if (has_leading_separator) _common_path[1..] else _common_path; - const shortest = @minimum(normalized_from.len, normalized_to.len); + const shortest = @min(normalized_from.len, normalized_to.len); var last_common_separator = std.mem.lastIndexOfScalar(u8, _common_path, separator) orelse 0; diff --git a/src/router.zig b/src/router.zig index 2fbb53373f5925..dac234faaaa36b 100644 --- a/src/router.zig +++ b/src/router.zig @@ -558,7 +558,7 @@ pub const Route = struct { pub fn sortByNameString(_: @This(), lhs: string, rhs: string) bool { const math = std.math; - const n = @minimum(lhs.len, rhs.len); + const n = @min(lhs.len, rhs.len); var i: usize = 0; while (i < n) : (i += 1) { switch (math.order(sort_table[lhs[i]], sort_table[rhs[i]])) { @@ -1259,7 +1259,7 @@ const Pattern = struct { if (input.len == 0 or input.len <= @as(usize, offset)) return Pattern{ .value = .{ .static = HashedString.empty }, - .len = @truncate(RoutePathInt, @minimum(input.len, @as(usize, offset))), + .len = @truncate(RoutePathInt, @min(input.len, @as(usize, offset))), }; var i: RoutePathInt = offset; @@ -1272,7 +1272,7 @@ const Pattern = struct { while (i <= end) : (i += 1) { switch (input[i]) { '/' => { - return Pattern{ .len = @minimum(i + 1, end), .value = .{ .static = initHashedString(input[offset..i]) } }; + return Pattern{ .len = @min(i + 1, end), .value = .{ .static = initHashedString(input[offset..i]) } }; }, '[' => { if (i > offset) { @@ -1339,7 +1339,7 @@ const Pattern = struct { if (@enumToInt(tag) > @enumToInt(Tag.dynamic) and i <= end) return error.CatchAllMustBeAtTheEnd; return Pattern{ - .len = @minimum(i + 1, end), + .len = @min(i + 1, end), .value = switch (tag) { .dynamic => .{ .dynamic = param, diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 92bd6189cf2271..84545b977853e5 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -542,7 
+542,7 @@ pub fn decodeVLQ(encoded: []const u8, start: usize) VLQResult { var vlq: u32 = 0; // hint to the compiler what the maximum value is - const encoded_ = encoded[start..][0..@minimum(encoded.len - start, comptime (vlq_max_in_bytes + 1))]; + const encoded_ = encoded[start..][0..@min(encoded.len - start, comptime (vlq_max_in_bytes + 1))]; // inlining helps for the 1 or 2 byte case, hurts a little for larger comptime var i: usize = 0; diff --git a/src/sourcemap/vlq_bench.zig b/src/sourcemap/vlq_bench.zig index e6ea2724f9e5f2..2116025f928b1f 100644 --- a/src/sourcemap/vlq_bench.zig +++ b/src/sourcemap/vlq_bench.zig @@ -106,7 +106,7 @@ const SourceMap = struct { var vlq: u32 = 0; // hint to the compiler what the maximum value is - const encoded_ = encoded[start..][0..@minimum(encoded.len - start, comptime (vlq_max_in_bytes + 1))]; + const encoded_ = encoded[start..][0..@min(encoded.len - start, comptime (vlq_max_in_bytes + 1))]; // inlining helps for the 1 or 2 byte case, hurts a little for larger comptime var i: usize = 0; diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 9a016502072622..8b9f50dcce57b3 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -184,7 +184,7 @@ pub inline fn indexOf(self: string, str: string) ?usize { // var start: usize = end - n; // while (end < buf.len) { // start = end - n; -// const last_end = @minimum(end + k - 1, buf.len); +// const last_end = @min(end + k - 1, buf.len); // const last_start = last_end - n; // // Look for the first character in the delimter @@ -200,7 +200,7 @@ pub inline fn indexOf(self: string, str: string) ?usize { // } // } // } -// end = @minimum(end + n, buf.len); +// end = @min(end + n, buf.len); // } // if (start < buf.len) return std.mem.indexOfPos(T, buf, start_index, delimiter); // return null; // Not found @@ -831,7 +831,7 @@ pub inline fn copyU8IntoU16(output_: []u16, input_: []const u8) void { var input_ptr = input.ptr; var output_ptr = output.ptr; - const last_input_ptr = input_ptr + @minimum(input.len, output.len); + const last_input_ptr = input_ptr + @min(input.len, output.len); while (last_input_ptr != input_ptr) { output_ptr[0] = input_ptr[0]; @@ -915,7 +915,7 @@ pub inline fn copyU16IntoU8(output_: []u8, comptime InputType: type, input_: Inp var input_ptr = input.ptr; var output_ptr = output.ptr; - const last_input_ptr = input_ptr + @minimum(input.len, output.len); + const last_input_ptr = input_ptr + @min(input.len, output.len); while (last_input_ptr != input_ptr) { output_ptr[0] = @truncate(u8, input_ptr[0]); @@ -1346,7 +1346,7 @@ pub fn copyLatin1IntoUTF8(buf_: []u8, comptime Type: type, latin1_: Type) Encode pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_: Type, comptime stop: bool) EncodeIntoResult { if (comptime bun.FeatureFlags.latin1_is_now_ascii) { - const to_copy = @truncate(u32, @minimum(buf_.len, latin1_.len)); + const to_copy = @truncate(u32, @min(buf_.len, latin1_.len)); @memcpy(buf_.ptr, latin1_.ptr, to_copy); return .{ .written = to_copy, .read = to_copy }; } @@ -1355,7 +1355,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ var latin1 = latin1_; while (buf.len > 0 and latin1.len > 0) { inner: { - var remaining_runs = @minimum(buf.len, latin1.len) / ascii_vector_size; + var remaining_runs = @min(buf.len, latin1.len) / ascii_vector_size; while (remaining_runs > 0) : (remaining_runs -= 1) { const vec: AsciiVector = latin1[0..ascii_vector_size].*; @@ -1418,7 +1418,7 @@ pub fn 
copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ { const Int = u64; const size = @sizeOf(Int); - while (@minimum(buf.len, latin1.len) >= size) { + while (@min(buf.len, latin1.len) >= size) { const bytes = @bitCast(Int, latin1[0..size].*); buf[0..size].* = @bitCast([size]u8, bytes); @@ -1446,7 +1446,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ } { - const end = latin1.ptr + @minimum(buf.len, latin1.len); + const end = latin1.ptr + @min(buf.len, latin1.len); assert(@ptrToInt(latin1.ptr + 8) > @ptrToInt(end)); const start_ptr = @ptrToInt(buf.ptr); const start_ptr_latin1 = @ptrToInt(latin1.ptr); @@ -2283,12 +2283,12 @@ pub fn copyUTF16IntoUTF8(buf: []u8, comptime Type: type, utf16: Type) EncodeInto var ended_on_non_ascii = false; while (firstNonASCII16(Type, utf16_remaining)) |i| { - const end = @minimum(i, remaining.len); + const end = @min(i, remaining.len); if (end > 0) copyU16IntoU8(remaining, Type, utf16_remaining[0..end]); remaining = remaining[end..]; utf16_remaining = utf16_remaining[end..]; - if (@minimum(utf16_remaining.len, remaining.len) == 0) + if (@min(utf16_remaining.len, remaining.len) == 0) break; const replacement = utf16Codepoint(Type, utf16_remaining); @@ -2305,7 +2305,7 @@ pub fn copyUTF16IntoUTF8(buf: []u8, comptime Type: type, utf16: Type) EncodeInto } if (remaining.len > 0 and !ended_on_non_ascii and utf16_remaining.len > 0) { - const len = @minimum(remaining.len, utf16_remaining.len); + const len = @min(remaining.len, utf16_remaining.len); copyU16IntoU8(remaining[0..len], Type, utf16_remaining[0..len]); utf16_remaining = utf16_remaining[len..]; remaining = remaining[len..]; @@ -2347,7 +2347,7 @@ pub fn elementLengthUTF8IntoUTF16(comptime Type: type, utf8: Type) usize { const replacement = utf16Codepoint(Type, utf8_remaining); count += replacement.len; - utf8_remaining = utf8_remaining[@minimum(replacement.utf8Width(), utf8_remaining.len)..]; + utf8_remaining = utf8_remaining[@min(replacement.utf8Width(), utf8_remaining.len)..]; } return count + utf8_remaining.len; @@ -3257,7 +3257,7 @@ pub fn formatLatin1(slice_: []const u8, writer: anytype) !void { try writer.writeAll(slice[0..i]); slice = slice[i..]; } - const result = strings.copyLatin1IntoUTF8(&chunk, @TypeOf(slice), slice[0..@minimum(chunk.len, slice.len)]); + const result = strings.copyLatin1IntoUTF8(&chunk, @TypeOf(slice), slice[0..@min(chunk.len, slice.len)]); if (result.read == 0 or result.written == 0) break; try writer.writeAll(chunk[0..result.written]); @@ -3493,7 +3493,7 @@ pub fn NewCodePointIterator(comptime CodePointType: type, comptime zeroValue: co const cp_len = utf8ByteSequenceLength(bytes[next_]); it.next_width = cp_len; - it.i = @minimum(next_, bytes.len); + it.i = @min(next_, bytes.len); const slice = bytes[prev..][0..cp_len]; it.width = @intCast(u3, slice.len); diff --git a/src/string_joiner.zig b/src/string_joiner.zig index 03d63364e6f3ff..e929bb5afaddea 100644 --- a/src/string_joiner.zig +++ b/src/string_joiner.zig @@ -40,7 +40,7 @@ pub fn done(this: *Joiner, allocator: std.mem.Allocator) ![]u8 { const to_join = join.data.slice[join.data.offset..]; @memcpy(remaining.ptr, to_join.ptr, to_join.len); - remaining = remaining[@minimum(remaining.len, to_join.len)..]; + remaining = remaining[@min(remaining.len, to_join.len)..]; var prev = join; el_ = join.next; diff --git a/src/watcher.zig b/src/watcher.zig index 0663303bce7a69..d21aa4aafc1b44 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -464,7 +464,7 @@ pub fn 
NewWatcher(comptime ContextType: type) type { const eventlist_index = this.watchlist.items(.eventlist_index); while (remaining_events > 0) { - const slice = events[0..@minimum(remaining_events, this.watch_events.len)]; + const slice = events[0..@min(remaining_events, this.watch_events.len)]; var watchevents = this.watch_events[0..slice.len]; var watch_event_id: u32 = 0; for (slice) |event| { From 521cbfc8e17a726b57851dc7fc46a3841ab26071 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:03:43 +0100 Subject: [PATCH 02/51] empty test name must be omitted --- src/resolver/resolve_path.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index 00c0c10ad6c293..d6bf4f1f58850c 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -1257,6 +1257,6 @@ test "longestCommonPath" { _ = t.expect("/app/public/", longestCommonPath(more[0..2]), @src()); } -test "" { +test { @import("std").testing.refAllDecls(@This()); } From 101997fbb08ffdd294edc024fb17739f717da156 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:04:44 +0100 Subject: [PATCH 03/51] Fix: invalid builtin function: '@maximum' --- misctools/http_bench.zig | 2 +- src/bun.js/api/bun.zig | 4 ++-- src/bun.js/api/bun/subprocess.zig | 2 +- src/bun.js/api/html_rewriter.zig | 12 ++++++------ src/bun.js/api/server.zig | 8 ++++---- src/bun.js/base.zig | 4 ++-- src/bun.js/bindings/bindings.zig | 4 ++-- src/bun.js/javascript.zig | 16 ++++++++-------- src/bun.js/node/node_fs.zig | 6 +++--- src/bun.js/node/syscall.zig | 2 +- src/bun.js/node/types.zig | 12 ++++++------ src/bun.js/webcore/response.zig | 16 ++++++++-------- src/bun.js/webcore/streams.zig | 16 ++++++++-------- src/cli.zig | 2 +- src/cli/add_completions.zig | 2 +- src/cli/upgrade_command.zig | 6 +++--- src/css_scanner.zig | 2 +- src/fs.zig | 2 +- src/http.zig | 2 +- src/http/url_path.zig | 6 +++--- src/http_client_async.zig | 2 +- src/install/install.zig | 10 +++++----- src/install/integrity.zig | 2 +- src/install/npm.zig | 16 ++++++++-------- src/install/resolvers/folder_resolver.zig | 2 +- src/js_ast.zig | 10 +++++----- src/js_printer.zig | 10 +++++----- src/libarchive/libarchive.zig | 2 +- src/logger.zig | 4 ++-- src/mdx/mdx_parser.zig | 2 +- src/network_thread.zig | 14 +++++++------- src/report.zig | 6 +++--- src/router.zig | 6 +++--- src/sourcemap/sourcemap.zig | 6 +++--- src/string_immutable.zig | 4 ++-- src/string_types.zig | 2 +- src/url.zig | 2 +- src/watcher.zig | 2 +- src/work_pool.zig | 2 +- 39 files changed, 115 insertions(+), 115 deletions(-) diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index fe2d29d6a43b2e..acf3aac317e908 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -252,7 +252,7 @@ pub fn main() anyerror!void { fail_count += 1; } - max_duration = @maximum(max_duration, http.elapsed); + max_duration = @max(max_duration, http.elapsed); min_duration = @min(min_duration, http.elapsed); switch (resp.status_code) { diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index 2fa6839d6c70e1..58407e2c059e39 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -1706,7 +1706,7 @@ pub fn allocUnsafe( const length = @intCast( usize, @min( - @maximum(1, (args.nextEat() orelse JSC.JSValue.jsNumber(@as(i32, 1))).toInt32()), + @max(1, (args.nextEat() orelse JSC.JSValue.jsNumber(@as(i32, 1))).toInt32()), std.math.maxInt(i32), ), ); @@ -2434,7 +2434,7 @@ pub const Timer = struct { 
// We don't deal with nesting levels directly // but we do set the minimum timeout to be 1ms for repeating timers - const interval: i32 = @maximum( + const interval: i32 = @max( countdown.coerce(i32, globalThis), if (repeat) @as(i32, 1) else 0, ); diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 4f6a910347486a..1dbc045d3c0bb5 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -505,7 +505,7 @@ pub const Subprocess = struct { pub fn readAll(this: *BufferedOutput, comptime force: bool) void { // read as much as we can from the pipe while (this.internal_buffer.len <= this.max_internal_buffer) { - var buffer_: [@maximum(std.mem.page_size, 16384)]u8 = undefined; + var buffer_: [@max(std.mem.page_size, 16384)]u8 = undefined; var buf: []u8 = buffer_[0..]; diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index eb602223be54bf..a3632bf329e3cd 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -308,7 +308,7 @@ pub const HTMLRewriter = struct { doc.ctx = this; } - const chunk_size = @maximum(size_hint orelse 16384, 1024); + const chunk_size = @max(size_hint orelse 16384, 1024); this.rewriter = builder.build( .UTF8, .{ @@ -418,7 +418,7 @@ pub const HTMLRewriter = struct { .preallocated_parsing_buffer_size = if (input_size == JSC.WebCore.Blob.max_size) 1024 else - @maximum(input_size, 1024), + @max(input_size, 1024), .max_allowed_memory_usage = std.math.maxInt(u32), }, false, @@ -594,7 +594,7 @@ pub const HTMLRewriter = struct { // sink.rewriter = builder.build( // .UTF8, // .{ - // .preallocated_parsing_buffer_size = @maximum(original.body.len(), 1024), + // .preallocated_parsing_buffer_size = @max(original.body.len(), 1024), // .max_allowed_memory_usage = std.math.maxInt(u32), // }, // false, @@ -1467,13 +1467,13 @@ pub const AttributeIterator = struct { \\ } \\ \\ #iterator; - \\ + \\ \\ [Symbol.iterator]() { \\ return this; \\ } - \\ + \\ \\ next() { - \\ if (this.#iterator === null) + \\ if (this.#iterator === null) \\ return {done: true}; \\ var value = this.#iterator.next(); \\ if (!value) { diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 4d1d1369386782..6639195f1f2f5e 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -305,7 +305,7 @@ pub const ServerConfig = struct { args.port = @intCast( u16, @min( - @maximum(0, port_.coerce(i32, global)), + @max(0, port_.coerce(i32, global)), std.math.maxInt(u16), ), ); @@ -358,7 +358,7 @@ pub const ServerConfig = struct { } if (arg.getTruthy(global, "maxRequestBodySize")) |max_request_body_size| { - args.max_request_body_size = @intCast(u64, @maximum(0, max_request_body_size.toInt64())); + args.max_request_body_size = @intCast(u64, @max(0, max_request_body_size.toInt64())); } if (arg.getTruthy(global, "error")) |onError| { @@ -2515,7 +2515,7 @@ pub const WebSocketServer = struct { globalObject.throwInvalidArguments("websocket expects maxPayloadLength to be an integer", .{}); return null; } - server.maxPayloadLength = @intCast(u32, @truncate(i33, @maximum(value.toInt64(), 0))); + server.maxPayloadLength = @intCast(u32, @truncate(i33, @max(value.toInt64(), 0))); } } if (object.get(globalObject, "idleTimeout")) |value| { @@ -2535,7 +2535,7 @@ pub const WebSocketServer = struct { return null; } - server.backpressureLimit = @intCast(u32, @truncate(i33, @maximum(value.toInt64(), 0))); + server.backpressureLimit = @intCast(u32, @truncate(i33, @max(value.toInt64(), 0))); } } 
// if (object.get(globalObject, "sendPings")) |value| { diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index cdaa788bcf6ccd..167ffb26fbbff5 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -1421,12 +1421,12 @@ pub fn NewClassWithInstanceType( var middle_padding: usize = 0; if (property_names.len > 0) { for (property_names) |prop| { - middle_padding = @maximum(prop.len, middle_padding); + middle_padding = @max(prop.len, middle_padding); } } if (function_names.len > 0) { for (function_names[0..function_names.len]) |_name| { - middle_padding = @maximum(std.mem.span(_name).len, middle_padding); + middle_padding = @max(std.mem.span(_name).len, middle_padding); } } diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 3c60997401fd2c..9240ebf2599347 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -2802,7 +2802,7 @@ pub const JSValue = enum(JSValueReprInt) { ?*JSInternalPromise => asInternalPromise(this), ?*JSPromise => asPromise(this), - u52 => @truncate(u52, @intCast(u64, @maximum(this.toInt64(), 0))), + u52 => @truncate(u52, @intCast(u64, @max(this.toInt64(), 0))), u64 => toUInt64NoTruncate(this), u8 => @truncate(u8, toU32(this)), i16 => @truncate(i16, toInt32(this)), @@ -3497,7 +3497,7 @@ pub const JSValue = enum(JSValueReprInt) { } pub inline fn toU32(this: JSValue) u32 { - return @intCast(u32, @maximum(this.toInt32(), 0)); + return @intCast(u32, @max(this.toInt32(), 0)); } pub fn getLengthOfArray(this: JSValue, globalThis: *JSGlobalObject) u32 { diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index adcc33ab84333e..bb0e8d7b88a635 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -604,7 +604,7 @@ pub const VirtualMachine = struct { @intCast( u128, // handle if they set their system clock to be before epoch - @maximum( + @max( std.time.nanoTimestamp(), origin_relative_epoch, ), @@ -1580,8 +1580,8 @@ pub const VirtualMachine = struct { if (frames[i].position.isInvalid()) continue; if (this.source_mappings.resolveMapping( frames[i].source_url.slice(), - @maximum(frames[i].position.line, 0), - @maximum(frames[i].position.column_start, 0), + @max(frames[i].position.line, 0), + @max(frames[i].position.column_start, 0), )) |mapping| { frames[i].position.line = mapping.original.lines; frames[i].position.column_start = mapping.original.columns; @@ -1634,8 +1634,8 @@ pub const VirtualMachine = struct { var top = &frames[0]; if (this.source_mappings.resolveMapping( top.source_url.slice(), - @maximum(top.position.line, 0), - @maximum(top.position.column_start, 0), + @max(top.position.line, 0), + @max(top.position.column_start, 0), )) |mapping| { var log = logger.Log.init(default_allocator); var errorable: ErrorableResolvedSource = undefined; @@ -1684,8 +1684,8 @@ pub const VirtualMachine = struct { if (frame.position.isInvalid()) continue; if (this.source_mappings.resolveMapping( frame.source_url.slice(), - @maximum(frame.position.line, 0), - @maximum(frame.position.column_start, 0), + @max(frame.position.line, 0), + @max(frame.position.column_start, 0), )) |mapping| { frame.position.line = mapping.original.lines; frame.remapped = true; @@ -1703,7 +1703,7 @@ pub const VirtualMachine = struct { var line_numbers = exception.stack.source_lines_numbers[0..exception.stack.source_lines_len]; var max_line: i32 = -1; - for (line_numbers) |line| max_line = @maximum(max_line, line); + for (line_numbers) |line| max_line = @max(max_line, line); const max_line_number_pad = 
std.fmt.count("{d}", .{max_line}); var source_lines = exception.stack.sourceLineIterator(); diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 0abef19c0e516f..edbe7158672395 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -2509,7 +2509,7 @@ pub const NodeFS = struct { // 16 KB is high end of what is okay to use for stack space // good thing we ask for absurdly large stack sizes var buf: [16384]u8 = undefined; - var remain = @intCast(u64, @maximum(stat_.size, 0)); + var remain = @intCast(u64, @max(stat_.size, 0)); toplevel: while (remain > 0) { const amt = switch (Syscall.read(src_fd, buf[0..@min(buf.len, remain)])) { .result => |result| result, @@ -2607,7 +2607,7 @@ pub const NodeFS = struct { .err => |err| return Maybe(Return.CopyFile){ .err = err }, }; - var size = @intCast(usize, @maximum(stat_.size, 0)); + var size = @intCast(usize, @max(stat_.size, 0)); defer { _ = linux.ftruncate(dest_fd, @intCast(i64, @truncate(u63, wrote))); @@ -3374,7 +3374,7 @@ pub const NodeFS = struct { }; // For certain files, the size might be 0 but the file might still have contents. - const size = @intCast(u64, @maximum(stat_.size, 0)); + const size = @intCast(u64, @max(stat_.size, 0)); var buf = std.ArrayList(u8).init(bun.default_allocator); buf.ensureTotalCapacityPrecise(size + 16) catch unreachable; diff --git a/src/bun.js/node/syscall.zig b/src/bun.js/node/syscall.zig index 705b263b64a7b0..105f9050db1990 100644 --- a/src/bun.js/node/syscall.zig +++ b/src/bun.js/node/syscall.zig @@ -556,7 +556,7 @@ pub const Error = struct { const errno_values = std.enums.values(os.E); var err = @enumToInt(os.E.SUCCESS); for (errno_values) |errn| { - err = @maximum(err, @enumToInt(errn)); + err = @max(err, @enumToInt(errn)); } break :brk err; }; diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 075a80d3d18529..07ccc8decef225 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -774,7 +774,7 @@ pub fn timeLikeFromJS(ctx: JSC.C.JSContextRef, value_: JSC.JSValue, exception: J return null; } - return @floatToInt(TimeLike, @maximum(@floor(seconds), std.math.minInt(TimeLike))); + return @floatToInt(TimeLike, @max(@floor(seconds), std.math.minInt(TimeLike))); } pub fn modeFromJS(ctx: JSC.C.JSContextRef, value: JSC.JSValue, exception: JSC.C.ExceptionRef) ?Mode { @@ -1128,9 +1128,9 @@ fn StatsLike(comptime name: [:0]const u8, comptime T: type) type { .atime_ms = @truncate(T, @intCast(i64, if (atime.tv_nsec > 0) (@intCast(usize, atime.tv_nsec) / std.time.ns_per_ms) else 0)), .mtime_ms = @truncate(T, @intCast(i64, if (mtime.tv_nsec > 0) (@intCast(usize, mtime.tv_nsec) / std.time.ns_per_ms) else 0)), .ctime_ms = @truncate(T, @intCast(i64, if (ctime.tv_nsec > 0) (@intCast(usize, ctime.tv_nsec) / std.time.ns_per_ms) else 0)), - .atime = @intToEnum(Date, @intCast(u64, @maximum(atime.tv_sec, 0))), - .mtime = @intToEnum(Date, @intCast(u64, @maximum(mtime.tv_sec, 0))), - .ctime = @intToEnum(Date, @intCast(u64, @maximum(ctime.tv_sec, 0))), + .atime = @intToEnum(Date, @intCast(u64, @max(atime.tv_sec, 0))), + .mtime = @intToEnum(Date, @intCast(u64, @max(mtime.tv_sec, 0))), + .ctime = @intToEnum(Date, @intCast(u64, @max(ctime.tv_sec, 0))), // Linux doesn't include this info in stat // maybe it does in statx, but do you really need birthtime? If you do please file an issue. 
@@ -1142,7 +1142,7 @@ fn StatsLike(comptime name: [:0]const u8, comptime T: type) type { .birthtime = if (Environment.isLinux) @intToEnum(Date, 0) else - @intToEnum(Date, @intCast(u64, @maximum(stat_.birthtime().tv_sec, 0))), + @intToEnum(Date, @intCast(u64, @max(stat_.birthtime().tv_sec, 0))), }; } @@ -1949,7 +1949,7 @@ pub const Process = struct { } pub fn exit(_: *JSC.JSGlobalObject, code: i32) callconv(.C) void { - std.os.exit(@truncate(u8, @intCast(u32, @maximum(code, 0)))); + std.os.exit(@truncate(u8, @intCast(u32, @max(code, 0)))); } pub export const Bun__version: [:0]const u8 = "v" ++ bun.Global.package_json_version; diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index fb282912265f2e..a17ad38a679222 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -351,7 +351,7 @@ pub const Response = struct { if (args.nextEat()) |init| { if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.body.init.status_code = @intCast(u16, @min(@maximum(0, init.toInt32()), std.math.maxInt(u16))); + response.body.init.status_code = @intCast(u16, @min(@max(0, init.toInt32()), std.math.maxInt(u16))); } else { if (Body.Init.init(getAllocator(globalThis), globalThis, init, init.jsType()) catch null) |_init| { response.body.init = _init; @@ -397,7 +397,7 @@ pub const Response = struct { if (args.nextEat()) |init| { if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.body.init.status_code = @intCast(u16, @min(@maximum(0, init.toInt32()), std.math.maxInt(u16))); + response.body.init.status_code = @intCast(u16, @min(@max(0, init.toInt32()), std.math.maxInt(u16))); } else { if (Body.Init.init(getAllocator(globalThis), globalThis, init, init.jsType()) catch null) |_init| { response.body.init = _init; @@ -2205,7 +2205,7 @@ pub const Blob = struct { if (stat.size > 0 and std.os.S.ISREG(stat.mode)) { this.size = @min( - @truncate(SizeType, @intCast(SizeType, @maximum(@intCast(i64, stat.size), 0))), + @truncate(SizeType, @intCast(SizeType, @max(@intCast(i64, stat.size), 0))), this.max_length, ); // read up to 4k at a time if @@ -2849,7 +2849,7 @@ pub const Blob = struct { } if (stat.size != 0) { - this.max_length = @maximum(@min(@intCast(SizeType, stat.size), this.max_length), this.offset) - this.offset; + this.max_length = @max(@min(@intCast(SizeType, stat.size), this.max_length), this.offset) - this.offset; if (this.max_length == 0) { this.doClose(); return; @@ -3001,7 +3001,7 @@ pub const Blob = struct { return JSValue.jsUndefined(); } - recommended_chunk_size = @intCast(SizeType, @maximum(0, @truncate(i52, arguments[0].toInt64()))); + recommended_chunk_size = @intCast(SizeType, @max(0, @truncate(i52, arguments[0].toInt64()))); } return JSC.WebCore.ReadableStream.fromBlob( globalThis, @@ -3162,7 +3162,7 @@ pub const Blob = struct { const start = start_.toInt64(); if (start < 0) { // If the optional start parameter is negative, let relativeStart be start + size. - relativeStart = @intCast(i64, @maximum(start + @intCast(i64, this.size), 0)); + relativeStart = @intCast(i64, @max(start + @intCast(i64, this.size), 0)); } else { // Otherwise, let relativeStart be start. relativeStart = @min(@intCast(i64, start), @intCast(i64, this.size)); @@ -3174,7 +3174,7 @@ pub const Blob = struct { // If end is negative, let relativeEnd be max((size + end), 0). if (end < 0) { // If the optional start parameter is negative, let relativeStart be start + size. 
- relativeEnd = @intCast(i64, @maximum(end + @intCast(i64, this.size), 0)); + relativeEnd = @intCast(i64, @max(end + @intCast(i64, this.size), 0)); } else { // Otherwise, let relativeStart be start. relativeEnd = @min(@intCast(i64, end), @intCast(i64, this.size)); @@ -3193,7 +3193,7 @@ pub const Blob = struct { } } - const len = @intCast(SizeType, @maximum(relativeEnd - relativeStart, 0)); + const len = @intCast(SizeType, @max(relativeEnd - relativeStart, 0)); // This copies over the is_all_ascii flag // which is okay because this will only be a <= slice diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index d0f196291aa931..9bd8821dc41d01 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -392,7 +392,7 @@ pub const StreamStart = union(Tag) { if (value.get(globalThis, "highWaterMark")) |chunkSize| { empty = false; - chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @maximum(0, @truncate(i51, chunkSize.toInt64()))); + chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @max(0, @truncate(i51, chunkSize.toInt64()))); } if (!empty) { @@ -409,7 +409,7 @@ pub const StreamStart = union(Tag) { var chunk_size: JSC.WebCore.Blob.SizeType = 0; if (value.get(globalThis, "highWaterMark")) |chunkSize| { - chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @maximum(0, @truncate(i51, chunkSize.toInt64()))); + chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @max(0, @truncate(i51, chunkSize.toInt64()))); } if (value.get(globalThis, "path")) |path| { @@ -445,7 +445,7 @@ pub const StreamStart = union(Tag) { if (value.get(globalThis, "highWaterMark")) |chunkSize| { empty = false; - chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @maximum(256, @truncate(i51, chunkSize.toInt64()))); + chunk_size = @intCast(JSC.WebCore.Blob.SizeType, @max(256, @truncate(i51, chunkSize.toInt64()))); } if (!empty) { @@ -2823,7 +2823,7 @@ pub const ByteStream = struct { return .{ .ready = void{} }; } - return .{ .chunk_size = @maximum(this.highWaterMark, std.mem.page_size) }; + return .{ .chunk_size = @max(this.highWaterMark, std.mem.page_size) }; } pub fn value(this: *@This()) JSValue { @@ -3149,7 +3149,7 @@ pub const FileBlobLoader = struct { this.pending.result = .{ .err = Syscall.Error{ // this is too hacky - .errno = @truncate(Syscall.Error.Int, @intCast(u16, @maximum(1, @errorToInt(err)))), + .errno = @truncate(Syscall.Error.Int, @intCast(u16, @max(1, @errorToInt(err)))), .syscall = .read, }, }; @@ -3390,7 +3390,7 @@ pub const FileBlobLoader = struct { @as(usize, default_fifo_chunk_size); return if (file.max_size > 0) - if (available_to_read != std.math.maxInt(usize)) @min(chunk_size, available_to_read) else @min(@maximum(this.total_read, file.max_size) - this.total_read, chunk_size) + if (available_to_read != std.math.maxInt(usize)) @min(chunk_size, available_to_read) else @min(@max(this.total_read, file.max_size) - this.total_read, chunk_size) else @min(available_to_read, chunk_size); } @@ -3641,9 +3641,9 @@ pub const FileBlobLoader = struct { // Returns when the file pointer is not at the end of // file. data contains the offset from current position // to end of file, and may be negative. 
- available_to_read = @intCast(usize, @maximum(sizeOrOffset, 0)); + available_to_read = @intCast(usize, @max(sizeOrOffset, 0)); } else if (std.os.S.ISCHR(this.mode) or std.os.S.ISFIFO(this.mode)) { - available_to_read = @intCast(usize, @maximum(sizeOrOffset, 0)); + available_to_read = @intCast(usize, @max(sizeOrOffset, 0)); } } if (this.finalized and this.scheduled_count == 0) { diff --git a/src/cli.zig b/src/cli.zig index 0e79b1e21b3e8c..f0e7526b52ce29 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -743,7 +743,7 @@ pub const HelpCommand = struct { \\ ; - var rand = std.rand.DefaultPrng.init(@intCast(u64, @maximum(std.time.milliTimestamp(), 0))).random(); + var rand = std.rand.DefaultPrng.init(@intCast(u64, @max(std.time.milliTimestamp(), 0))).random(); const package_add_i = rand.uintAtMost(usize, packages_to_add_filler.len - 1); const package_remove_i = rand.uintAtMost(usize, packages_to_remove_filler.len - 1); diff --git a/src/cli/add_completions.zig b/src/cli/add_completions.zig index b8001884dcf7e9..95201659d1e4ab 100644 --- a/src/cli/add_completions.zig +++ b/src/cli/add_completions.zig @@ -78,7 +78,7 @@ pub const biggest_list: usize = brk: { var iter = a.iterator(); var max: usize = 0; while (iter.next()) |list| { - max = @maximum(list.value.len, max); + max = @max(list.value.len, max); } break :brk max; }; diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index b77f90fc52ef44..28d323287ee581 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -118,7 +118,7 @@ pub const UpgradeCheckerThread = struct { } fn _run(env_loader: *DotEnv.Loader) anyerror!void { - var rand = std.rand.DefaultPrng.init(@intCast(u64, @maximum(std.time.milliTimestamp(), 0))); + var rand = std.rand.DefaultPrng.init(@intCast(u64, @max(std.time.milliTimestamp(), 0))); const delay = rand.random().intRangeAtMost(u64, 100, 10000); std.time.sleep(std.time.ns_per_ms * delay); @@ -353,7 +353,7 @@ pub const UpgradeCommand = struct { if (asset.asProperty("size")) |size_| { if (size_.expr.data == .e_number) { - version.size = @intCast(u32, @maximum(@floatToInt(i32, std.math.ceil(size_.expr.data.e_number.value)), 0)); + version.size = @intCast(u32, @max(@floatToInt(i32, std.math.ceil(size_.expr.data.e_number.value)), 0)); } } return version; @@ -458,7 +458,7 @@ pub const UpgradeCommand = struct { refresher.refresh(); var async_http = ctx.allocator.create(HTTP.AsyncHTTP) catch unreachable; var zip_file_buffer = try ctx.allocator.create(MutableString); - zip_file_buffer.* = try MutableString.init(ctx.allocator, @maximum(version.size, 1024)); + zip_file_buffer.* = try MutableString.init(ctx.allocator, @max(version.size, 1024)); async_http.* = HTTP.AsyncHTTP.initSync( ctx.allocator, diff --git a/src/css_scanner.zig b/src/css_scanner.zig index c48a54441f6ad0..2a56d818cf1b61 100644 --- a/src/css_scanner.zig +++ b/src/css_scanner.zig @@ -1248,7 +1248,7 @@ pub fn NewBundler( try this.writer.done(); return CodeCount{ - .written = @intCast(usize, @maximum(this.writer.written - start_count, 0)), + .written = @intCast(usize, @max(this.writer.written - start_count, 0)), .approximate_newline_count = lines_of_code, }; } diff --git a/src/fs.zig b/src/fs.zig index 2392a396fcf7c0..5edd3a30193651 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -127,7 +127,7 @@ pub const FileSystem = struct { return; } - max_fd = @maximum(fd, max_fd); + max_fd = @max(fd, max_fd); } pub var instance_loaded: bool = false; pub var instance: FileSystem = undefined; diff --git a/src/http.zig b/src/http.zig index 
a56594b9cb7169..41e3f8af30f178 100644 --- a/src/http.zig +++ b/src/http.zig @@ -3445,7 +3445,7 @@ pub const Server = struct { server.websocket_threadpool.stack_size = @truncate( u32, @min( - @maximum(128_000, Fs.FileSystem.RealFS.Limit.stack), + @max(128_000, Fs.FileSystem.RealFS.Limit.stack), 4_000_000, ), ); diff --git a/src/http/url_path.zig b/src/http/url_path.zig index 58bc060757c465..1565db00ae7d34 100644 --- a/src/http/url_path.zig +++ b/src/http/url_path.zig @@ -97,7 +97,7 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath { switch (c) { '?' => { - question_mark_i = @maximum(question_mark_i, i); + question_mark_i = @max(question_mark_i, i); if (question_mark_i < period_i) { period_i = -1; } @@ -107,10 +107,10 @@ pub fn parse(possibly_encoded_pathname_: string) !URLPath { } }, '.' => { - period_i = @maximum(period_i, i); + period_i = @max(period_i, i); }, '/' => { - last_slash = @maximum(last_slash, i); + last_slash = @max(last_slash, i); if (i > 0) { first_segment_end = @min(first_segment_end, i); diff --git a/src/http_client_async.zig b/src/http_client_async.zig index 2d85607e8bcb3c..57726b87c81981 100644 --- a/src/http_client_async.zig +++ b/src/http_client_async.zig @@ -428,7 +428,7 @@ pub const HTTPThread = struct { } fn processEvents_(this: *@This()) void { - this.loop.num_polls = @maximum(2, this.loop.num_polls); + this.loop.num_polls = @max(2, this.loop.num_polls); while (true) { this.drainEvents(); diff --git a/src/install/install.zig b/src/install/install.zig index d13773a5adafe8..ea926197cd97ea 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -3154,7 +3154,7 @@ pub const PackageManager = struct { entry.value_ptr.* = manifest; if (timestamp_this_tick == null) { - timestamp_this_tick = @truncate(u32, @intCast(u64, @maximum(0, std.time.timestamp()))) +| 300; + timestamp_this_tick = @truncate(u32, @intCast(u64, @max(0, std.time.timestamp()))) +| 300; } entry.value_ptr.*.pkg.public_max_age = timestamp_this_tick.?; @@ -3779,7 +3779,7 @@ pub const PackageManager = struct { if (env_loader.map.get("BUN_CONFIG_HTTP_RETRY_COUNT")) |retry_count| { if (std.fmt.parseInt(i32, retry_count, 10)) |int| { - this.max_retry_count = @intCast(u16, @min(@maximum(int, 0), 65355)); + this.max_retry_count = @intCast(u16, @min(@max(int, 0), 65355)); } else |_| {} } @@ -4331,7 +4331,7 @@ pub const PackageManager = struct { ctx.install, ); - manager.timestamp_for_manifest_cache_control = @truncate(u32, @intCast(u64, @maximum(std.time.timestamp(), 0))); + manager.timestamp_for_manifest_cache_control = @truncate(u32, @intCast(u64, @max(std.time.timestamp(), 0))); return manager; } @@ -4417,7 +4417,7 @@ pub const PackageManager = struct { u32, @intCast( u64, - @maximum( + @max( std.time.timestamp(), 0, ), @@ -6623,7 +6623,7 @@ pub const PackageManager = struct { var printed_timestamp = false; if (install_summary.success > 0) { // it's confusing when it shows 3 packages and says it installed 1 - Output.pretty("\n {d} packages installed ", .{@maximum( + Output.pretty("\n {d} packages installed ", .{@max( install_summary.success, @truncate( u32, diff --git a/src/install/integrity.zig b/src/install/integrity.zig index bd4981b7428889..25eb25b935c30e 100644 --- a/src/install/integrity.zig +++ b/src/install/integrity.zig @@ -20,7 +20,7 @@ pub const Integrity = extern struct { var value: usize = 0; for (values) |val| { - value = @maximum(val, value); + value = @max(val, value); } break :brk value; diff --git a/src/install/npm.zig b/src/install/npm.zig index 
2be84624cf8fdd..d8a763ce2242da 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -208,7 +208,7 @@ pub const Registry = struct { package_name, newly_last_modified, new_etag, - @truncate(u32, @intCast(u64, @maximum(0, std.time.timestamp()))) + 300, + @truncate(u32, @intCast(u64, @max(0, std.time.timestamp()))) + 300, )) |package| { if (package_manager.options.enable.manifest_cache) { PackageManifest.Serializer.save(&package, package_manager.getTemporaryDirectory(), package_manager.getCacheDirectory()) catch {}; @@ -556,7 +556,7 @@ pub const PackageManifest = struct { var out_path_buf: ["-18446744073709551615".len + ".npm".len + 1]u8 = undefined; var dest_path_stream = std.io.fixedBufferStream(&dest_path_buf); var dest_path_stream_writer = dest_path_stream.writer(); - try dest_path_stream_writer.print("{x}.npm-{x}", .{ file_id, @maximum(std.time.milliTimestamp(), 0) }); + try dest_path_stream_writer.print("{x}.npm-{x}", .{ file_id, @max(std.time.milliTimestamp(), 0) }); try dest_path_stream_writer.writeByte(0); var tmp_path: [:0]u8 = dest_path_buf[0 .. dest_path_stream.pos - 1 :0]; try writeFile(this, tmp_path, tmpdir); @@ -620,15 +620,15 @@ pub const PackageManifest = struct { pub fn reportSize(this: *const PackageManifest) void { Output.prettyErrorln( - \\ Versions count: {d} - \\ External Strings count: {d} + \\ Versions count: {d} + \\ External Strings count: {d} \\ Package Versions count: {d} - \\ + \\ \\ Bytes: \\ - \\ Versions: {d} - \\ External: {d} - \\ Packages: {d} + \\ Versions: {d} + \\ External: {d} + \\ Packages: {d} \\ Strings: {d} \\ Total: {d} , .{ diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index b4860074732481..4438ca4153297e 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -134,7 +134,7 @@ pub const FolderResolution = union(Tag) { const len = try package_json.getEndPos(); body.data.reset(); - body.data.inflate(@maximum(len, 2048)) catch unreachable; + body.data.inflate(@max(len, 2048)) catch unreachable; body.data.list.expandToCapacity(); const source_buf = try package_json.readAll(body.data.list.items); diff --git a/src/js_ast.zig b/src/js_ast.zig index 0bd73c93fded05..c0acd74da651b9 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1236,22 +1236,22 @@ pub const E = struct { pub inline fn toU64(self: Number) u64 { @setRuntimeSafety(false); - return @floatToInt(u64, @maximum(@trunc(self.value), 0)); + return @floatToInt(u64, @max(@trunc(self.value), 0)); } pub inline fn toUsize(self: Number) usize { @setRuntimeSafety(false); - return @floatToInt(usize, @maximum(@trunc(self.value), 0)); + return @floatToInt(usize, @max(@trunc(self.value), 0)); } pub inline fn toU32(self: Number) u32 { @setRuntimeSafety(false); - return @floatToInt(u32, @maximum(@trunc(self.value), 0)); + return @floatToInt(u32, @max(@trunc(self.value), 0)); } pub inline fn toU16(self: Number) u16 { @setRuntimeSafety(false); - return @floatToInt(u16, @maximum(@trunc(self.value), 0)); + return @floatToInt(u16, @max(@trunc(self.value), 0)); } pub fn jsonStringify(self: *const Number, opts: anytype, o: anytype) !void { @@ -8120,7 +8120,7 @@ pub const Macro = struct { // Give it >= 256 KB stack space // Cast to usize to ensure we get an 8 byte aligned pointer - const PooledFrame = ObjectPool([@maximum(@sizeOf(@Frame(Run.runAsync)), 1024 * 1024 * 2) / @sizeOf(usize)]usize, null, true, 1); + const PooledFrame = ObjectPool([@max(@sizeOf(@Frame(Run.runAsync)), 1024 * 1024 * 2) / 
@sizeOf(usize)]usize, null, true, 1); var pooled_frame = PooledFrame.get(default_allocator); defer pooled_frame.release(); diff --git a/src/js_printer.zig b/src/js_printer.zig index f5614d5f5c15f9..ad5be8b23ab326 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -4724,7 +4724,7 @@ pub fn NewWriter( pub const Error = error{FormatError}; pub fn writeAll(writer: *Self, bytes: anytype) Error!usize { - const written = @maximum(writer.written, 0); + const written = @max(writer.written, 0); writer.print(@TypeOf(bytes), bytes); return @intCast(usize, writer.written) - @intCast(usize, written); } @@ -5080,7 +5080,7 @@ pub fn printAst( try printer.writer.done(); - return @intCast(usize, @maximum(printer.writer.written, 0)); + return @intCast(usize, @max(printer.writer.written, 0)); } pub fn printJSON( @@ -5114,7 +5114,7 @@ pub fn printJSON( } try printer.writer.done(); - return @intCast(usize, @maximum(printer.writer.written, 0)); + return @intCast(usize, @max(printer.writer.written, 0)); } pub fn printCommonJS( @@ -5172,7 +5172,7 @@ pub fn printCommonJS( try printer.writer.done(); - return @intCast(usize, @maximum(printer.writer.written, 0)); + return @intCast(usize, @max(printer.writer.written, 0)); } pub const WriteResult = struct { @@ -5257,7 +5257,7 @@ pub fn printCommonJSThreaded( @atomicStore(u32, end_off_ptr, result.end_off, .SeqCst); } - result.len = @intCast(usize, @maximum(printer.writer.written, 0)); + result.len = @intCast(usize, @max(printer.writer.written, 0)); return result; } diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index 1ec9a1f3785cdd..bb900639f459ef 100644 --- a/src/libarchive/libarchive.zig +++ b/src/libarchive/libarchive.zig @@ -569,7 +569,7 @@ pub const Archive = struct { }; defer if (comptime close_handles) file.close(); - const entry_size = @maximum(lib.archive_entry_size(entry), 0); + const entry_size = @max(lib.archive_entry_size(entry), 0); const size = @intCast(usize, entry_size); if (size > 0) { if (ctx) |ctx_| { diff --git a/src/logger.zig b/src/logger.zig index 75309c84326479..03cbc6c206864b 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -73,7 +73,7 @@ pub const Loc = packed struct { pub const toUsize = i; pub inline fn i(self: *const Loc) usize { - return @intCast(usize, @maximum(self.start, 0)); + return @intCast(usize, @max(self.start, 0)); } pub const Empty = Loc{ .start = -1 }; @@ -1231,7 +1231,7 @@ pub const Source = struct { pub fn initErrorPosition(self: *const Source, _offset: Loc) ErrorPosition { var prev_code_point: i32 = 0; - var offset: usize = std.math.min(if (_offset.start < 0) 0 else @intCast(usize, _offset.start), @maximum(self.contents.len, 1) - 1); + var offset: usize = std.math.min(if (_offset.start < 0) 0 else @intCast(usize, _offset.start), @max(self.contents.len, 1) - 1); const contents = self.contents; diff --git a/src/mdx/mdx_parser.zig b/src/mdx/mdx_parser.zig index c46b43f8f37dea..f79fc8035cd76d 100644 --- a/src/mdx/mdx_parser.zig +++ b/src/mdx/mdx_parser.zig @@ -1797,7 +1797,7 @@ pub const MDX = struct { var root_children = std.ArrayListUnmanaged(Expr){}; var first_loc = try run(this, &root_children); - first_loc.start = @maximum(first_loc.start, 0); + first_loc.start = @max(first_loc.start, 0); const args_loc = first_loc; first_loc.start += 1; const body_loc = first_loc; diff --git a/src/network_thread.zig b/src/network_thread.zig index f48e94e5a1446d..a3baebacb4846a 100644 --- a/src/network_thread.zig +++ b/src/network_thread.zig @@ -34,27 +34,27 @@ pub fn onStartIOThread(waker: 
AsyncIO.Waker) void { if (comptime Environment.isLinux) { if (err == error.SystemOutdated) { Output.prettyErrorln( - \\error: Linux kernel version doesn't support io_uring, which Bun depends on. + \\error: Linux kernel version doesn't support io_uring, which Bun depends on. \\ \\ To fix this error: please upgrade to a newer Linux kernel. - \\ + \\ \\ If you're using Windows Subsystem for Linux, here's how: \\ 1. Open PowerShell as an administrator \\ 2. Run this: \\ wsl --update \\ wsl --shutdown - \\ + \\ \\ Please make sure you're using WSL version 2 (not WSL 1). To check: wsl -l -v \\ If you are on WSL 1, update to WSL 2 with the following commands: \\ 1. wsl --set-default-version 2 \\ 2. wsl --set-version [distro_name] 2 \\ 3. Now follow the WSL 2 instructions above. \\ Where [distro_name] is one of the names from the list given by: wsl -l -v - \\ + \\ \\ If that doesn't work (and you're on a Windows machine), try this: \\ 1. Open Windows Update \\ 2. Download any updates to Windows Subsystem for Linux - \\ + \\ \\ If you're still having trouble, ask for help in bun's discord https://bun.sh/discord , .{}); break :log; @@ -65,7 +65,7 @@ pub fn onStartIOThread(waker: AsyncIO.Waker) void { \\To fix this error: please increase the memlock limit or upgrade to Linux kernel 5.11+ \\ \\If Bun is running inside Docker, make sure to set the memlock limit to unlimited (-1) - \\ + \\ \\ docker run --rm --init --ulimit memlock=-1:-1 jarredsumner/bun:edge \\ \\To bump the memlock limit, check one of the following: @@ -203,7 +203,7 @@ pub const AddressListCache = std.HashMap(u64, CachedAddressList, IdentityContext pub var address_list_cached: AddressListCache = undefined; pub fn getAddressList(allocator: std.mem.Allocator, name: []const u8, port: u16) !*std.net.AddressList { // const hash = CachedAddressList.hash(name, port); - // const now = @intCast(u64, @maximum(0, std.time.milliTimestamp())); + // const now = @intCast(u64, @max(0, std.time.milliTimestamp())); // if (address_list_cached.getPtr(hash)) |cached| { // if (cached.expire_after > now) { // return cached; diff --git a/src/report.zig b/src/report.zig index 79e60f5be49085..f1ded35984d2b1 100644 --- a/src/report.zig +++ b/src/report.zig @@ -69,7 +69,7 @@ pub const CrashReportWriter = struct { const file_path = std.fmt.bufPrintZ( &crash_reporter_path, "{s}/.bun-crash/v{s}-{d}.crash", - .{ base_dir, Global.package_json_version, @intCast(u64, @maximum(std.time.milliTimestamp(), 0)) }, + .{ base_dir, Global.package_json_version, @intCast(u64, @max(std.time.milliTimestamp(), 0)) }, ) catch return; std.fs.cwd().makeDir(std.fs.path.dirname(std.mem.span(file_path)).?) 
catch {}; @@ -192,7 +192,7 @@ pub fn fatal(err_: ?anyerror, msg_: ?string) void { if (msg_) |msg| { const msg_ptr = @ptrToInt(msg.ptr); if (msg_ptr > 0) { - const len = @maximum(@min(msg.len, 1024), 0); + const len = @max(@min(msg.len, 1024), 0); if (len > 0) { if (Output.isEmojiEnabled()) { @@ -296,7 +296,7 @@ pub noinline fn handleCrash(signal: i32, addr: usize) void { } } - std.c._exit(128 + @truncate(u8, @intCast(u8, @maximum(signal, 0)))); + std.c._exit(128 + @truncate(u8, @intCast(u8, @max(signal, 0)))); } pub noinline fn globalError(err: anyerror) noreturn { diff --git a/src/router.zig b/src/router.zig index dac234faaaa36b..fa1a6d13b275ac 100644 --- a/src/router.zig +++ b/src/router.zig @@ -500,8 +500,8 @@ pub const TinyPtr = packed struct { std.debug.assert(end < right); } - const length = @maximum(end, right) - right; - const offset = @maximum(@ptrToInt(in.ptr), @ptrToInt(parent.ptr)) - @ptrToInt(parent.ptr); + const length = @max(end, right) - right; + const offset = @max(@ptrToInt(in.ptr), @ptrToInt(parent.ptr)) - @ptrToInt(parent.ptr); return TinyPtr{ .offset = @truncate(u16, offset), .len = @truncate(u16, length) }; } }; @@ -1214,7 +1214,7 @@ const Pattern = struct { return null; }; offset = pattern.len; - kind = @maximum(@enumToInt(@as(Pattern.Tag, pattern.value)), kind); + kind = @max(@enumToInt(@as(Pattern.Tag, pattern.value)), kind); count += @intCast(u16, @boolToInt(@enumToInt(@as(Pattern.Tag, pattern.value)) > @enumToInt(Pattern.Tag.static))); } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 84545b977853e5..c96b1e0b909971 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -617,7 +617,7 @@ pub const LineOffsetTable = struct { pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_line_count: i32) List { var list = List{}; // Preallocate the top-level table using the approximate line count from the lexer - list.ensureUnusedCapacity(allocator, @intCast(usize, @maximum(approximate_line_count, 1))) catch unreachable; + list.ensureUnusedCapacity(allocator, @intCast(usize, @max(approximate_line_count, 1))) catch unreachable; var column: i32 = 0; var byte_offset_to_first_non_ascii: u32 = 0; var column_byte_offset: u32 = 0; @@ -676,7 +676,7 @@ pub const LineOffsetTable = struct { } } else { switch (c) { - (@maximum('\r', '\n') + 1)...127 => { + (@max('\r', '\n') + 1)...127 => { // skip ahead to the next newline or non-ascii character if (strings.indexOfNewlineOrNonASCIICheckStart(remaining, @as(u32, len_), false)) |j| { column += @intCast(i32, j); @@ -1094,7 +1094,7 @@ pub const Chunk = struct { b.prev_loc = loc; const list = b.line_offset_tables; const original_line = LineOffsetTable.findLine(list, loc); - const line = list.get(@intCast(usize, @maximum(original_line, 0))); + const line = list.get(@intCast(usize, @max(original_line, 0))); // Use the line to compute the column var original_column = loc.start - @intCast(i32, line.byte_offset_to_start_of_line); diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 8b9f50dcce57b3..b36f85ed3410bf 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -958,7 +958,7 @@ pub fn toUTF16Alloc(allocator: std.mem.Allocator, bytes: []const u8, comptime fa } } - remaining = remaining[@maximum(replacement.len, 1)..]; + remaining = remaining[@max(replacement.len, 1)..]; const new_len = strings.u16Len(replacement.code_point); try output.ensureUnusedCapacity(new_len); output.items.len += @as(usize, new_len); @@ -993,7 +993,7 @@ pub fn 
toUTF16Alloc(allocator: std.mem.Allocator, bytes: []const u8, comptime fa } } - remaining = remaining[@maximum(replacement.len, 1)..]; + remaining = remaining[@max(replacement.len, 1)..]; const new_len = j + @as(usize, strings.u16Len(replacement.code_point)); try output.ensureUnusedCapacity(new_len); output.items.len += new_len; diff --git a/src/string_types.zig b/src/string_types.zig index d7e95b3f760d21..a9f0d2e25b05f7 100644 --- a/src/string_types.zig +++ b/src/string_types.zig @@ -103,7 +103,7 @@ pub const HashedString = struct { pub fn Eql(this: HashedString, comptime Other: type, other: Other) bool { switch (comptime Other) { HashedString, *HashedString, *const HashedString => { - return ((@maximum(this.hash, other.hash) > 0 and this.hash == other.hash) or (this.ptr == other.ptr)) and this.len == other.len; + return ((@max(this.hash, other.hash) > 0 and this.hash == other.hash) or (this.ptr == other.ptr)) and this.len == other.len; }, else => { return @as(usize, this.len) == other.len and @truncate(u32, std.hash.Wyhash.hash(0, other[0..other.len])) == this.hash; diff --git a/src/url.zig b/src/url.zig index 7e47720791e57c..56cc8182a543f6 100644 --- a/src/url.zig +++ b/src/url.zig @@ -843,7 +843,7 @@ fn stringPointerFromStrings(parent: string, in: string) Api.StringPointer { if (in_end < end) return Api.StringPointer{}; return Api.StringPointer{ - .offset = @truncate(u32, @maximum(@ptrToInt(in.ptr), @ptrToInt(parent.ptr)) - @ptrToInt(parent.ptr)), + .offset = @truncate(u32, @max(@ptrToInt(in.ptr), @ptrToInt(parent.ptr)) - @ptrToInt(parent.ptr)), .length = @truncate(u32, in.len), }; } diff --git a/src/watcher.zig b/src/watcher.zig index d21aa4aafc1b44..e627e4b5196991 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -436,7 +436,7 @@ pub fn NewWatcher(comptime ContextType: type) type { null, ); - var changes = changelist[0..@intCast(usize, @maximum(0, count_))]; + var changes = changelist[0..@intCast(usize, @max(0, count_))]; var watchevents = this.watch_events[0..changes.len]; for (changes) |event, i| { watchevents[i].fromKEvent(event); diff --git a/src/work_pool.zig b/src/work_pool.zig index 9ddf106e999877..364ca8abed70e4 100644 --- a/src/work_pool.zig +++ b/src/work_pool.zig @@ -13,7 +13,7 @@ pub fn NewWorkPool(comptime max_threads: ?usize) type { @setCold(true); pool = ThreadPool.init(.{ - .max_threads = max_threads orelse @floatToInt(u32, @floor(@intToFloat(f32, @maximum(std.Thread.getCpuCount() catch 0, 2)) * 0.8)), + .max_threads = max_threads orelse @floatToInt(u32, @floor(@intToFloat(f32, @max(std.Thread.getCpuCount() catch 0, 2)) * 0.8)), .stack_size = 2 * 1024 * 1024, }); return &pool; From bf86fe99602d7d698f489e4c4de086b1f3a19e23 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:08:51 +0100 Subject: [PATCH 04/51] Fix: number '0000010' has leading zero --- src/bundler/generate_node_modules_bundle.zig | 2 +- src/install/lockfile.zig | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/bundler/generate_node_modules_bundle.zig b/src/bundler/generate_node_modules_bundle.zig index a27dc0af3a9aab..1b18bc7e445154 100644 --- a/src/bundler/generate_node_modules_bundle.zig +++ b/src/bundler/generate_node_modules_bundle.zig @@ -848,7 +848,7 @@ pub fn generate( _ = C.fchmod( this.tmpfile.handle, // chmod 777 - 0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020, + 0o10 | 0o100 | 0o1 | 0o1000 | 0o40 | 0o4 | 0o2 | 0o400 | 0o200 | 0o20, ); try tmpfile.promote(tmpname, top_dir.fd, destination); // 
Print any errors at the end diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index c737c8341b9d79..eb5457f9fd279c 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -1166,7 +1166,7 @@ pub const Printer = struct { try writer.writeAll( \\# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. \\# yarn lockfile v1 - \\# bun ./bun.lockb --hash: + \\# bun ./bun.lockb --hash: ); try writer.print( "{}\n\n", @@ -1444,7 +1444,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ) void { _ = C.fchmod( tmpfile.fd, // chmod 777 - 0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020, + 0o10 | 0o100 | 0o1 | 0o1000 | 0o40 | 0o4 | 0o2 | 0o400 | 0o200 | 0o20, ); tmpfile.promote(tmpname, std.fs.cwd().fd, filename) catch |err| { From e73b0484767641f4ef056fe2a9731ff514dead6e Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:12:51 +0100 Subject: [PATCH 05/51] Fix: number '0000010' has leading zero --- src/libarchive/libarchive-bindings.zig | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/libarchive/libarchive-bindings.zig b/src/libarchive/libarchive-bindings.zig index e1c3ef920f73ba..381414016afc4a 100644 --- a/src/libarchive/libarchive-bindings.zig +++ b/src/libarchive/libarchive-bindings.zig @@ -11,13 +11,13 @@ const FILE = @import("std").c.FILE; const dev_t = @import("std").c.dev_t; pub const FileType = enum(mode_t) { - regular = 0100000, - link = 0120000, - socket = 0140000, - character_oriented_device = 0020000, - block_oriented_device = 0060000, - directory = 0040000, - fifo = 0010000, + regular = 0o100000, + link = 0o120000, + socket = 0o140000, + character_oriented_device = 0o20000, + block_oriented_device = 0o60000, + directory = 0o40000, + fifo = 0o10000, }; pub const SymlinkType = enum(c_int) { From c7dc0ce68a8fb56800c8bb7818b84eb1b9ac5459 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:17:04 +0100 Subject: [PATCH 06/51] Fix: capture shadows declaration of 'str' --- src/install/npm.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/install/npm.zig b/src/install/npm.zig index d8a763ce2242da..88bc6c76c072b3 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -1045,8 +1045,8 @@ pub const PackageManifest = struct { } } }, - .e_string => |str| { - package_version.cpu = Architecture.apply(Architecture.none, str.data); + .e_string => |estr| { + package_version.cpu = Architecture.apply(Architecture.none, estr.data); }, else => {}, } From 6dfd99739195720a96b93f5f33d2c0ed8c39ba5b Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:19:10 +0100 Subject: [PATCH 07/51] Fix: pointless discard of function parameter --- src/http_client_async.zig | 1 - 1 file changed, 1 deletion(-) diff --git a/src/http_client_async.zig b/src/http_client_async.zig index 57726b87c81981..669257e73b9048 100644 --- a/src/http_client_async.zig +++ b/src/http_client_async.zig @@ -512,7 +512,6 @@ pub fn onClose( comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { - _ = socket; log("Closed {s}\n", .{client.url.href}); const in_progress = client.state.stage != .done and client.state.stage != .fail; From 8ea81203ed20692015570a2d8d6bcecbcf712560 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:55:00 +0100 Subject: [PATCH 08/51] Fix: @ctz - expected 1 argument, found 2 --- src/lock.zig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/src/lock.zig b/src/lock.zig index 7e6bfccdf61385..2237fd591e66ea 100644 --- a/src/lock.zig +++ b/src/lock.zig @@ -24,7 +24,8 @@ pub const Mutex = struct { inline fn acquireFast(self: *Mutex, comptime strong: bool) bool { // On x86, "lock bts" uses less i-cache & can be faster than "lock cmpxchg" below. if (comptime is_x86) { - return self.state.bitSet(@ctz(u32, LOCKED), .Acquire) == UNLOCKED; + const locked_bit = @ctz(@as(u32, LOCKED)); + return self.state.bitSet(locked_bit, .Acquire) == UNLOCKED; } const cas_fn = comptime switch (strong) { From 5c8b61f843c3cefd59409a7907b43ece0804a480 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:55:52 +0100 Subject: [PATCH 09/51] Fix: @ctz - expected 1 argument, found 2 --- src/js_lexer.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 3a5cd39993a7bb..056e777db13eb7 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -3012,7 +3012,7 @@ fn indexOfInterestingCharacterInStringLiteral(text_: []const u8, quote: u8) ?usi if (@reduce(.Max, any_significant) > 0) { const bitmask = @ptrCast(*const u16, &any_significant).*; - const first = @ctz(u16, bitmask); + const first = @ctz(@as(u16, bitmask)); std.debug.assert(first < strings.ascii_vector_size); return first + (@ptrToInt(text.ptr) - @ptrToInt(text_.ptr)); } From b4b437497c509cf506dde47547acdcc0a191a4b1 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:56:18 +0100 Subject: [PATCH 10/51] Fix: @ctz - expected 1 argument, found 2 --- src/sync.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sync.zig b/src/sync.zig index 22c99a05b62bc2..262f5fa0df8b67 100644 --- a/src/sync.zig +++ b/src/sync.zig @@ -657,8 +657,8 @@ else const IS_WRITING: usize = 1; const WRITER: usize = 1 << 1; const READER: usize = 1 << (1 + std.meta.bitCount(Count)); - const WRITER_MASK: usize = std.math.maxInt(Count) << @ctz(usize, WRITER); - const READER_MASK: usize = std.math.maxInt(Count) << @ctz(usize, READER); + const WRITER_MASK: usize = std.math.maxInt(Count) << @ctz(@as(usize, WRITER)); + const READER_MASK: usize = std.math.maxInt(Count) << @ctz(@as(usize, READER)); const Count = std.meta.Int(.unsigned, @divFloor(std.meta.bitCount(usize) - 1, 2)); pub fn init() RwLock { From 1eca6e67acd1c8da4a699bf17ddef5c321de5368 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:57:55 +0100 Subject: [PATCH 11/51] Fix: @ctz - expected 1 argument, found 2 --- src/string_immutable.zig | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/string_immutable.zig b/src/string_immutable.zig index b36f85ed3410bf..6ddb76a006f8f3 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1135,7 +1135,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list const mask = bytes & 0x8080808080808080; if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(latin1[first_set_byte] >= 127); } @@ -1375,7 +1375,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ buf[0..size].* = @bitCast([size]u8, bytes); if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(latin1[first_set_byte] >= 127); } @@ -2609,7 +2609,7 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 { 
const mask = bytes & 0x8080808080808080; if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(remaining[first_set_byte] > 127); var j: usize = 0; @@ -2729,7 +2729,7 @@ pub fn indexOfNewlineOrNonASCIICheckStart(slice_: []const u8, offset: u32, compt if (@reduce(.Max, cmp) > 0) { const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*; - const first = @ctz(AsciiVectorInt, bitmask); + const first = @ctz(@as(AsciiVectorInt, bitmask)); return @as(u32, first) + @intCast(u32, slice.len - remaining.len) + offset; } @@ -2768,7 +2768,7 @@ pub fn indexOfNeedsEscape(slice: []const u8) ?u32 { if (@reduce(.Max, cmp) > 0) { const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*; - const first = @ctz(AsciiVectorInt, bitmask); + const first = @ctz(@as(AsciiVectorInt, bitmask)); return @as(u32, first) + @truncate(u32, @ptrToInt(remaining.ptr) - @ptrToInt(slice.ptr)); } @@ -2810,7 +2810,7 @@ pub fn indexOfChar(slice: []const u8, char: u8) ?u32 { if (@reduce(.Max, @bitCast(AsciiVectorU1, cmp)) > 0) { const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*; - const first = @ctz(AsciiVectorInt, bitmask); + const first = @ctz(@as(AsciiVectorInt, bitmask)); return @intCast(u32, @as(u32, first) + @intCast(u32, slice.len - remaining.len)); } remaining = remaining[ascii_vector_size..]; @@ -2875,7 +2875,7 @@ pub fn indexOfNotChar(slice: []const u8, char: u8) ?u32 { const cmp = @splat(ascii_vector_size, char) != vec; if (@reduce(.Max, @bitCast(AsciiVectorU1, cmp)) > 0) { const bitmask = @ptrCast(*const AsciiVectorInt, &cmp).*; - const first = @ctz(AsciiVectorInt, bitmask); + const first = @ctz(@as(AsciiVectorInt, bitmask)); return @as(u32, first) + @intCast(u32, slice.len - remaining.len); } @@ -3077,7 +3077,7 @@ pub fn firstNonASCII16CheckMin(comptime Slice: type, slice: Slice, comptime chec const cmp = @bitCast(AsciiVectorU16U1, vec > max_u16_ascii) | @bitCast(AsciiVectorU16U1, vec < min_u16_ascii); const bitmask: u16 = @ptrCast(*const u16, &cmp).*; - const first = @ctz(u16, bitmask); + const first = @ctz(@as(u16, bitmask)); return @intCast(u32, @as(u32, first) + @intCast(u32, slice.len - remaining.len)); @@ -3136,7 +3136,7 @@ pub fn @"nextUTF16NonASCIIOr$`\\"( @bitCast(AsciiVectorU16U1, (vec == @splat(ascii_u16_vector_size, @as(u16, '\\')))); const bitmask = @ptrCast(*const u8, &cmp).*; - const first = @ctz(u8, bitmask); + const first = @ctz(@as(u8, bitmask)); if (first < ascii_u16_vector_size) { return @intCast(u32, @as(u32, first) + @intCast(u32, slice.len - remaining.len)); From a8440ddf5d250a9d5b005ef64d62379054dde738 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 00:59:29 +0100 Subject: [PATCH 12/51] Fix: @ctz - expected 1 argument, found 2 --- src/string_immutable.zig | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 6ddb76a006f8f3..ba69b1627ae9a6 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1157,7 +1157,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list const mask = bytes & 0x8080808080808080; if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(latin1[first_set_byte] >= 127); } @@ -1186,7 +1186,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list const mask = bytes & 0x8080808080808080; if 
(mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(latin1[first_set_byte] >= 127); } @@ -2627,7 +2627,7 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 { const mask = bytes & 0x8080808080808080; if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(remaining[first_set_byte] > 127); var j: usize = 0; @@ -2672,7 +2672,7 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 { if (mask > 0) { remaining.len -= @ptrToInt(remaining.ptr) - @ptrToInt(remaining_start); - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(remaining[first_set_byte] > 127); var j: usize = 0; @@ -3088,7 +3088,7 @@ pub fn firstNonASCII16CheckMin(comptime Slice: type, slice: Slice, comptime chec const cmp = vec > max_u16_ascii; const bitmask = @ptrCast(*const u16, &cmp).*; - const first = @ctz(u16, bitmask); + const first = @ctz(@as(u16, bitmask)); return @intCast(u32, @as(u32, first) + @intCast(u32, slice.len - remaining.len)); From 87f91bdac7681e2a983ac08e340d46786c69b2e5 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:00:13 +0100 Subject: [PATCH 13/51] Fix: @ctz - expected 1 argument, found 2 --- src/string_immutable.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/string_immutable.zig b/src/string_immutable.zig index ba69b1627ae9a6..7dcf390a0eb2bf 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1397,7 +1397,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ buf[0..size].* = @bitCast([size]u8, bytes); assert(mask > 0); - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime Environment.allow_assert) { assert(latin1[first_set_byte] >= 127); } From de56b55af5cac07c4b2117de4e35d533a764a2c1 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:02:36 +0100 Subject: [PATCH 14/51] Fix: declaration 'Type' shadows function parameter from outer scope --- src/ref_count.zig | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/ref_count.zig b/src/ref_count.zig index 589176c1ddc036..79fffb8ac194b2 100644 --- a/src/ref_count.zig +++ b/src/ref_count.zig @@ -1,10 +1,10 @@ const std = @import("std"); -pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { +pub fn RefCount(comptime MyType: type, comptime deinit_on_zero: bool) type { return struct { const AllocatorType = if (deinit_on_zero) std.mem.Allocator else void; - value: Type, + value: MyType, count: i32 = 1, allocator: AllocatorType = undefined, @@ -14,7 +14,7 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { /// Create a new reference counted value. pub inline fn init( - value: Type, + value: MyType, allocator: std.mem.Allocator, ) !*@This() { var ptr = try allocator.create(@This()); @@ -23,7 +23,7 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { } /// Get the value & increment the reference count. 
- pub inline fn get(this: *@This()) *Type { + pub inline fn get(this: *@This()) *MyType { std.debug.assert(this.count >= 0); this.count += 1; @@ -31,7 +31,7 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { } /// Get the value without incrementing the reference count. - pub inline fn leak(this: *@This()) *Type { + pub inline fn leak(this: *@This()) *MyType { return &this.value; } @@ -42,7 +42,7 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { pub inline fn create( this: *@This(), - value: Type, + value: MyType, allocator: AllocatorType, ) void { this.* = .{ @@ -53,7 +53,7 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { } pub inline fn deinit(this: *@This()) void { - if (comptime @hasDecl(Type, "deinit")) { + if (comptime @hasDecl(MyType, "deinit")) { this.value.deinit(); } @@ -75,6 +75,6 @@ pub fn RefCount(comptime Type: type, comptime deinit_on_zero: bool) type { } } - pub const Type = Type; + pub const Type = MyType; }; } From 206bb5d298e9d6c4e0b6dbc32d0a9ce9ec97082e Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:02:57 +0100 Subject: [PATCH 15/51] Fix: empty test name must be omitted --- src/fs.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fs.zig b/src/fs.zig index 5edd3a30193651..9cc2a3170e9fb9 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -1354,6 +1354,6 @@ test "PathName.init" { try std.testing.expectEqualStrings(res.ext, ".ext"); } -test "" { +test { @import("std").testing.refAllDecls(FileSystem); } From cafe22a28b3b5c1e5cdd65c40ff8ed41e19f666f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:05:42 +0100 Subject: [PATCH 16/51] Fix: octal has leading zeros --- src/bun.js/node/node_fs.zig | 8 ++++---- src/c.zig | 4 ++-- src/install/install.zig | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index edbe7158672395..669537184da0b0 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -1524,7 +1524,7 @@ const Arguments = struct { pub const WriteFile = struct { encoding: Encoding = Encoding.utf8, flag: FileSystemFlags = FileSystemFlags.@"w", - mode: Mode = 0666, + mode: Mode = 0o666, file: PathOrFileDescriptor, data: StringOrBuffer, @@ -2395,7 +2395,7 @@ pub const NodeFS = struct { const path = path_.sliceZ(&this.sync_error_buf); switch (comptime flavor) { .sync => { - const fd = switch (Syscall.open(path, @enumToInt(FileSystemFlags.@"a"), 000666)) { + const fd = switch (Syscall.open(path, @enumToInt(FileSystemFlags.@"a"), 0o666)) { .result => |result| result, .err => |err| return .{ .err = err }, }; @@ -2480,7 +2480,7 @@ pub const NodeFS = struct { return ret.success; } } else { - const src_fd = switch (Syscall.open(src, std.os.O.RDONLY, 0644)) { + const src_fd = switch (Syscall.open(src, std.os.O.RDONLY, 0o644)) { .result => |result| result, .err => |err| return .{ .err = err.withPath(args.src.slice()) }, }; @@ -2579,7 +2579,7 @@ pub const NodeFS = struct { return Maybe(Return.CopyFile).todo; } - const src_fd = switch (Syscall.open(src, std.os.O.RDONLY, 0644)) { + const src_fd = switch (Syscall.open(src, std.os.O.RDONLY, 0o644)) { .result => |result| result, .err => |err| return .{ .err = err }, }; diff --git a/src/c.zig b/src/c.zig index 56b27fdf7fad87..11ff4b27fe698e 100644 --- a/src/c.zig +++ b/src/c.zig @@ -122,7 +122,7 @@ pub fn moveFileZWithHandle(from_handle: std.os.fd_t, from_dir: std.os.fd_t, file // On Linux, this 
will be fast because sendfile() supports copying between two file descriptors on disk // macOS & BSDs will be slow because pub fn moveFileZSlow(from_dir: std.os.fd_t, filename: [*:0]const u8, to_dir: std.os.fd_t, destination: [*:0]const u8) !void { - const in_handle = try std.os.openatZ(from_dir, filename, std.os.O.RDONLY | std.os.O.CLOEXEC, 0600); + const in_handle = try std.os.openatZ(from_dir, filename, std.os.O.RDONLY | std.os.O.CLOEXEC, 0o600); try moveFileZSlowWithHandle(in_handle, to_dir, destination); } @@ -133,7 +133,7 @@ pub fn moveFileZSlowWithHandle(in_handle: std.os.fd_t, to_dir: std.os.fd_t, dest // ftruncate() instead didn't work. // this is technically racy because it could end up deleting the file without saving std.os.unlinkatZ(to_dir, destination, 0) catch {}; - const out_handle = try std.os.openatZ(to_dir, destination, std.os.O.WRONLY | std.os.O.CREAT | std.os.O.CLOEXEC, 022); + const out_handle = try std.os.openatZ(to_dir, destination, std.os.O.WRONLY | std.os.O.CREAT | std.os.O.CLOEXEC, 0o22); defer std.os.close(out_handle); if (comptime Environment.isLinux) { _ = std.os.system.fallocate(out_handle, 0, 0, @intCast(i64, stat_.size)); diff --git a/src/install/install.zig b/src/install/install.zig index ea926197cd97ea..6c5e0a1a4099fd 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -2480,7 +2480,7 @@ pub const PackageManager = struct { _ = C.fchmod( tmpfile.fd, // chmod 666, - 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020, + 0o40 | 0o4 | 0o2 | 0o400 | 0o200 | 0o20, ); try tmpfile.promote(tmpname, std.fs.cwd().fd, "yarn.lock"); From bfee0961d7beb8946b9cc98868ddad1a0db9e49c Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:06:33 +0100 Subject: [PATCH 17/51] Fix: @ctz - expected 1 argument, found 2 --- src/string_immutable.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 7dcf390a0eb2bf..1ebe7d5bf58e93 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1427,7 +1427,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_ const mask = bytes & 0x8080808080808080; if (mask > 0) { - const first_set_byte = @ctz(Int, mask) / 8; + const first_set_byte = @ctz(@as(Int, mask)) / 8; if (comptime stop) return .{ .written = std.math.maxInt(u32), .read = std.math.maxInt(u32) }; if (comptime Environment.allow_assert) { From 00ca7a91c11afb9cd69c28931bf762a34470ab67 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:13:35 +0100 Subject: [PATCH 18/51] Fix: @ctz/@popCount - expected 1 argument, found 2 --- src/http/websocket.zig | 2 +- src/http/websocket_http_client.zig | 2 +- src/install/bit_set.zig | 28 ++++++++++++++-------------- src/string_immutable.zig | 6 +++--- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/http/websocket.zig b/src/http/websocket.zig index 12348f83c4ec4a..8e0b8468276cb4 100644 --- a/src/http/websocket.zig +++ b/src/http/websocket.zig @@ -190,7 +190,7 @@ pub const Websocket = struct { // Close and send the status pub fn close(self: *Websocket, code: u16) !void { - const c = if (native_endian == .Big) code else @byteSwap(u16, code); + const c = if (native_endian == .Big) code else @byteSwap(@as(u16, code)); const data = @bitCast([2]u8, c); _ = try self.writeMessage(.Close, &data); } diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index 1f2f7f5d6a783b..24aeaf4721e8e7 100644 --- 
a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -639,7 +639,7 @@ fn parseWebSocketHeader( // + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // | Payload Data continued ... | // +---------------------------------------------------------------+ - const header = @bitCast(WebsocketHeader, @byteSwap(u16, @bitCast(u16, bytes))); + const header = @bitCast(WebsocketHeader, @byteSwap(@as(u16, @bitCast(u16, bytes)))); const payload = @as(usize, header.len); payload_length.* = payload; receiving_type.* = header.opcode; diff --git a/src/install/bit_set.zig b/src/install/bit_set.zig index 64903c9e99c317..bb23b4e01dd446 100644 --- a/src/install/bit_set.zig +++ b/src/install/bit_set.zig @@ -91,7 +91,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// Returns the total number of set bits in this bit set. pub fn count(self: Self) usize { - return @popCount(MaskInt, self.mask); + return @popCount(@as(MaskInt, self.mask)); } /// Changes the value of the specified bit of the bit @@ -154,7 +154,7 @@ pub fn IntegerBitSet(comptime size: u16) type { pub fn findFirstSet(self: Self) ?usize { const mask = self.mask; if (mask == 0) return null; - return @ctz(MaskInt, mask); + return @ctz(@as(MaskInt, mask)); } /// Finds the index of the first set bit, and unsets it. @@ -162,7 +162,7 @@ pub fn IntegerBitSet(comptime size: u16) type { pub fn toggleFirstSet(self: *Self) ?usize { const mask = self.mask; if (mask == 0) return null; - const index = @ctz(MaskInt, mask); + const index = @ctz(@as(MaskInt, mask)); self.mask = mask & (mask - 1); return index; } @@ -197,12 +197,12 @@ pub fn IntegerBitSet(comptime size: u16) type { switch (direction) { .forward => { - const next_index = @ctz(MaskInt, self.bits_remain); + const next_index = @ctz(@as(MaskInt, self.bits_remain)); self.bits_remain &= self.bits_remain - 1; return next_index; }, .reverse => { - const leading_zeroes = @clz(MaskInt, self.bits_remain); + const leading_zeroes = @clz(@as(MaskInt, self.bits_remain)); const top_bit = (@bitSizeOf(MaskInt) - 1) - leading_zeroes; self.bits_remain &= (@as(MaskInt, 1) << @intCast(ShiftInt, top_bit)) - 1; return top_bit; @@ -322,7 +322,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { pub fn count(self: Self) usize { var total: usize = 0; for (self.masks) |mask| { - total += @popCount(MaskInt, mask); + total += @popCount(@as(MaskInt, mask)); } return total; } @@ -405,7 +405,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { if (mask != 0) break mask; offset += @bitSizeOf(MaskInt); } else return null; - return offset + @ctz(MaskInt, mask); + return offset + @ctz(@as(MaskInt, mask)); } /// Finds the index of the first set bit, and unsets it. 
@@ -416,7 +416,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { if (mask.* != 0) break mask; offset += @bitSizeOf(MaskInt); } else return null; - const index = @ctz(MaskInt, mask.*); + const index = @ctz(@as(MaskInt, mask.*)); mask.* &= (mask.* - 1); return offset + index; } @@ -587,7 +587,7 @@ pub const DynamicBitSetUnmanaged = struct { var total: usize = 0; for (self.masks[0..num_masks]) |mask| { // Note: This is where we depend on padding bits being zero - total += @popCount(MaskInt, mask); + total += @popCount(@as(MaskInt, mask)); } return total; } @@ -712,7 +712,7 @@ pub const DynamicBitSetUnmanaged = struct { mask += 1; offset += @bitSizeOf(MaskInt); } else return null; - return offset + @ctz(MaskInt, mask[0]); + return offset + @ctz(@as(MaskInt, mask[0])); } /// Finds the index of the first set bit, and unsets it. @@ -725,7 +725,7 @@ pub const DynamicBitSetUnmanaged = struct { mask += 1; offset += @bitSizeOf(MaskInt); } else return null; - const index = @ctz(MaskInt, mask[0]); + const index = @ctz(@as(MaskInt, mask[0])); mask[0] &= (mask[0] - 1); return offset + index; } @@ -929,7 +929,7 @@ pub const IteratorOptions = struct { // The iterator is reusable between several bit set types fn BitSetIterator(comptime MaskInt: type, comptime options: IteratorOptions) type { - const ShiftInt = std.math.Log2Int(MaskInt); + //const ShiftInt = std.math.Log2Int(MaskInt); const kind = options.kind; const direction = options.direction; return struct { @@ -978,12 +978,12 @@ fn BitSetIterator(comptime MaskInt: type, comptime options: IteratorOptions) typ switch (direction) { .forward => { - const next_index = @ctz(MaskInt, self.bits_remain) + self.bit_offset; + const next_index = @ctz(@as(MaskInt, self.bits_remain)) + self.bit_offset; self.bits_remain &= self.bits_remain - 1; return next_index; }, .reverse => { - const leading_zeroes = @clz(MaskInt, self.bits_remain); + const leading_zeroes = @clz(@as(MaskInt, self.bits_remain)); const top_bit = (@bitSizeOf(MaskInt) - 1) - leading_zeroes; const no_top_bit_mask = (@as(MaskInt, 1) << @intCast(ShiftInt, top_bit)) - 1; self.bits_remain &= no_top_bit_mask; diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 1ebe7d5bf58e93..2978d94f31852f 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -583,7 +583,7 @@ pub fn countChar(self: string, char: u8) usize { while (remaining.len >= 16) { const vec: AsciiVector = remaining[0..ascii_vector_size].*; - const cmp = @popCount(std.meta.Int(.unsigned, ascii_vector_size), @bitCast(@Vector(ascii_vector_size, u1), vec == splatted)); + const cmp = @popCount(@as(std.meta.Int(.unsigned, ascii_vector_size), @bitCast(@Vector(ascii_vector_size, u1), vec == splatted))); total += @as(usize, @reduce(.Add, cmp)); remaining = remaining[ascii_vector_size..]; } @@ -1506,7 +1506,7 @@ pub fn elementLengthLatin1IntoUTF8(comptime Type: type, latin1_: Type) usize { @bitCast(Int, latin1[size .. 
2 * size].*) & 0x8080808080808080, }; - const non_ascii_count = ((@popCount(Int, bytes[0]) / 8) + (@popCount(Int, bytes[1]) / 8)); + const non_ascii_count = ((@popCount(@as(Int, bytes[0])) / 8) + (@popCount(@as(Int, bytes[1])) / 8)); total_non_ascii_count += non_ascii_count; } @@ -1516,7 +1516,7 @@ pub fn elementLengthLatin1IntoUTF8(comptime Type: type, latin1_: Type) usize { if (latin1.len >= 8) { const bytes = @bitCast(u64, latin1[0..8].*) & 0x8080808080808080; - total_non_ascii_count += @popCount(u64, bytes) / 8; + total_non_ascii_count += @popCount(@as(u64, bytes)) / 8; latin1 = latin1[8..]; } From 693167c1a63eaa3341ac42da291e523e7d2d0cc9 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:22:46 +0100 Subject: [PATCH 19/51] Fix: shadowing --- src/bun.js/unbounded_queue.zig | 4 ++-- src/bun.js/webcore/streams.zig | 4 ++-- src/install/lockfile.zig | 4 ++-- src/install/npm.zig | 4 ++-- src/js_parser.zig | 8 ++++---- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/bun.js/unbounded_queue.zig b/src/bun.js/unbounded_queue.zig index fd092290d672b9..86f1e44623e73c 100644 --- a/src/bun.js/unbounded_queue.zig +++ b/src/bun.js/unbounded_queue.zig @@ -19,7 +19,7 @@ pub const cache_line_length = switch (@import("builtin").target.cpu.arch) { }; pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) type { - const next = meta.fieldInfo(T, next_field).name; + const _next = meta.fieldInfo(T, next_field).name; return struct { const Self = @This(); @@ -31,7 +31,7 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) pub fn next(self: *Self.Batch.Iterator) ?*T { if (self.batch.count == 0) return null; const front = self.batch.front orelse unreachable; - self.batch.front = @field(front, next); + self.batch.front = @field(front, _next); self.batch.count -= 1; return front; } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 9bd8821dc41d01..395ded5cf3ac20 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -2504,7 +2504,7 @@ pub fn ReadableStreamSource( comptime onStart: anytype, comptime onPull: anytype, comptime onCancel: fn (this: *Context) void, - comptime deinit: fn (this: *Context) void, + comptime _deinit: fn (this: *Context) void, ) type { return struct { context: Context, @@ -2562,7 +2562,7 @@ pub fn ReadableStreamSource( return; } this.deinited = true; - deinit(&this.context); + _deinit(&this.context); } pub fn getError(this: *This) ?Syscall.Error { diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index eb5457f9fd279c..086fe0f0156084 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -2619,12 +2619,12 @@ pub const Package = extern struct { break :bin; }, - .e_string => |str| { + .e_string => |_str| { if (str.data.len > 0) { package.bin = Bin{ .tag = Bin.Tag.file, .value = .{ - .file = string_builder.append(String, str.data), + .file = string_builder.append(String, _str.data), }, }; break :bin; diff --git a/src/install/npm.zig b/src/install/npm.zig index 88bc6c76c072b3..062228653a8e9e 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -1067,8 +1067,8 @@ pub const PackageManifest = struct { } } }, - .e_string => |str| { - package_version.os = OperatingSystem.apply(OperatingSystem.none, str.data); + .e_string => |_str| { + package_version.os = OperatingSystem.apply(OperatingSystem.none, _str.data); }, else => {}, } diff --git a/src/js_parser.zig b/src/js_parser.zig index 
d7c897bce0941d..015ac960c7fa99 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -12928,10 +12928,10 @@ fn NewParser_( fn isValidAssignmentTarget(p: *P, expr: Expr) bool { return switch (expr.data) { .e_identifier => |ident| !isEvalOrArguments(p.loadNameFromRef(ident.ref)), - .e_dot => |e| e.optional_chain == null, - .e_index => |e| e.optional_chain == null, - .e_array => |e| !e.is_parenthesized, - .e_object => |e| !e.is_parenthesized, + .e_dot => |_e| _e.optional_chain == null, + .e_index => |_e| _e.optional_chain == null, + .e_array => |_e| !_e.is_parenthesized, + .e_object => |_e| !_e.is_parenthesized, else => false, }; } From a2a0480b5530f03432e89c581a4d64f2a30e135d Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:28:37 +0100 Subject: [PATCH 20/51] Fix: shadowing --- src/bun.js/unbounded_queue.zig | 38 +++++++++++++++++----------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/src/bun.js/unbounded_queue.zig b/src/bun.js/unbounded_queue.zig index 86f1e44623e73c..96323b288c63a2 100644 --- a/src/bun.js/unbounded_queue.zig +++ b/src/bun.js/unbounded_queue.zig @@ -52,45 +52,45 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) count: usize = 0, front: T align(queue_padding_length) = init: { var stub: T = undefined; - @field(stub, next) = null; + @field(stub, _next) = null; break :init stub; }, pub fn push(self: *Self, src: *T) void { assert(@atomicRmw(usize, &self.count, .Add, 1, .Release) >= 0); - @field(src, next) = null; + @field(src, _next) = null; const old_back = @atomicRmw(?*T, &self.back, .Xchg, src, .AcqRel) orelse &self.front; - @field(old_back, next) = src; + @field(old_back, _next) = src; } pub fn pushBatch(self: *Self, first: *T, last: *T, count: usize) void { assert(@atomicRmw(usize, &self.count, .Add, count, .Release) >= 0); - @field(last, next) = null; + @field(last, _next) = null; const old_back = @atomicRmw(?*T, &self.back, .Xchg, last, .AcqRel) orelse &self.front; - @field(old_back, next) = first; + @field(old_back, _next) = first; } pub fn pop(self: *Self) ?*T { - const first = @atomicLoad(?*T, &@field(self.front, next), .Acquire) orelse return null; - if (@atomicLoad(?*T, &@field(first, next), .Acquire)) |next_item| { - @atomicStore(?*T, &@field(self.front, next), next_item, .Monotonic); + const first = @atomicLoad(?*T, &@field(self.front, _next), .Acquire) orelse return null; + if (@atomicLoad(?*T, &@field(first, _next), .Acquire)) |next_item| { + @atomicStore(?*T, &@field(self.front, _next), next_item, .Monotonic); assert(@atomicRmw(usize, &self.count, .Sub, 1, .Monotonic) >= 1); return first; } const last = @atomicLoad(?*T, &self.back, .Acquire) orelse &self.front; if (first != last) return null; - @atomicStore(?*T, &@field(self.front, next), null, .Monotonic); + @atomicStore(?*T, &@field(self.front, _next), null, .Monotonic); if (@cmpxchgStrong(?*T, &self.back, last, &self.front, .AcqRel, .Acquire) == null) { assert(@atomicRmw(usize, &self.count, .Sub, 1, .Monotonic) >= 1); return first; } - var next_item = @atomicLoad(?*T, &@field(first, next), .Acquire); + var next_item = @atomicLoad(?*T, &@field(first, _next), .Acquire); while (next_item == null) : (atomic.spinLoopHint()) { - next_item = @atomicLoad(?*T, &@field(first, next), .Acquire); + next_item = @atomicLoad(?*T, &@field(first, _next), .Acquire); } - @atomicStore(?*T, &@field(self.front, next), next_item, .Monotonic); + @atomicStore(?*T, &@field(self.front, _next), next_item, .Monotonic); assert(@atomicRmw(usize, 
&self.count, .Sub, 1, .Monotonic) >= 1); return first; } @@ -98,10 +98,10 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) pub fn popBatch(self: *Self) Self.Batch { var batch: Self.Batch = .{}; - var front = @atomicLoad(?*T, &@field(self.front, next), .Acquire) orelse return batch; + var front = @atomicLoad(?*T, &@field(self.front, _next), .Acquire) orelse return batch; batch.front = front; - var next_item = @atomicLoad(?*T, &@field(front, next), .Acquire); + var next_item = @atomicLoad(?*T, &@field(front, _next), .Acquire); while (next_item) |next_node| : (next_item = @atomicLoad(?*T, &@field(next_node, next), .Acquire)) { batch.count += 1; batch.last = front; @@ -111,12 +111,12 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) const last = @atomicLoad(?*T, &self.back, .Acquire) orelse &self.front; if (front != last) { - @atomicStore(?*T, &@field(self.front, next), front, .Release); + @atomicStore(?*T, &@field(self.front, _next), front, .Release); assert(@atomicRmw(usize, &self.count, .Sub, batch.count, .Monotonic) >= batch.count); return batch; } - @atomicStore(?*T, &@field(self.front, next), null, .Monotonic); + @atomicStore(?*T, &@field(self.front, _next), null, .Monotonic); if (@cmpxchgStrong(?*T, &self.back, last, &self.front, .AcqRel, .Acquire) == null) { batch.count += 1; batch.last = front; @@ -124,13 +124,13 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) return batch; } - next_item = @atomicLoad(?*T, &@field(front, next), .Acquire); + next_item = @atomicLoad(?*T, &@field(front, _next), .Acquire); while (next_item == null) : (atomic.spinLoopHint()) { - next_item = @atomicLoad(?*T, &@field(front, next), .Acquire); + next_item = @atomicLoad(?*T, &@field(front, _next), .Acquire); } batch.count += 1; - @atomicStore(?*T, &@field(self.front, next), next_item, .Monotonic); + @atomicStore(?*T, &@field(self.front, _next), next_item, .Monotonic); batch.last = front; assert(@atomicRmw(usize, &self.count, .Sub, batch.count, .Monotonic) >= batch.count); return batch; From 75888507929dc28f9fd960c79e6ea674c723ef0f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:28:43 +0100 Subject: [PATCH 21/51] Fix: shadowing --- src/bun.js/api/bun.zig | 1 - src/bun.js/base.zig | 4 ---- src/install/npm.zig | 6 +++--- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index 58407e2c059e39..4d6cb9f79ea8ec 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -3408,7 +3408,6 @@ pub const EnvironmentVariables = struct { pub fn convertToType(ctx: js.JSContextRef, obj: js.JSObjectRef, kind: js.JSType, exception: js.ExceptionRef) callconv(.C) js.JSValueRef { _ = ctx; - _ = obj; _ = kind; _ = exception; return obj; diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index 167ffb26fbbff5..96e3130f3ff0c9 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -1313,8 +1313,6 @@ pub fn NewClassWithInstanceType( const FunctionDeclarationsFormatter = struct { pub fn format(_: @This(), comptime fmt: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { - _ = fmt; - _ = writer; const definition = getDefinition(); if (static_functions__.len > 1) { for (definition.staticFunctions[0 .. 
static_functions__.len - 1]) |_, i| { @@ -1375,7 +1373,6 @@ pub fn NewClassWithInstanceType( \\}} // namespace Bun \\ ; - _ = writer; _ = header_file; const Opts = struct { name: string }; try writer.print(header_file, Opts{ @@ -1987,7 +1984,6 @@ pub fn NewClassWithInstanceType( for (function_name_literals) |function_name_literal, i| { const is_read_only = options.read_only; - _ = i; switch (@typeInfo(@TypeOf(@field(staticFunctions, function_name_literal)))) { .Struct => { const CtxField = @field(staticFunctions, function_name_literals[i]); diff --git a/src/install/npm.zig b/src/install/npm.zig index 062228653a8e9e..e10cc2d508a115 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -1132,12 +1132,12 @@ pub const PackageManifest = struct { break :bin; }, - .e_string => |str| { - if (str.data.len > 0) { + .e_string => |_str| { + if (_str.data.len > 0) { package_version.bin = Bin{ .tag = Bin.Tag.file, .value = .{ - .file = string_builder.append(String, str.data), + .file = string_builder.append(String, _str.data), }, }; break :bin; From 83fdc4501666f99dce966760f68dab4b40578419 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:37:49 +0100 Subject: [PATCH 22/51] Fix: unused --- src/bun.js/api/server.zig | 2 - src/bun.js/base.zig | 1 - src/bun.js/event_loop.zig | 1 - src/bun.js/node/node_fs.zig | 144 +++++++--------------------- src/bun.js/node/node_fs_binding.zig | 1 - src/bun.js/unbounded_queue.zig | 2 +- src/darwin_c.zig | 3 - 7 files changed, 33 insertions(+), 121 deletions(-) diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 6639195f1f2f5e..9f3333a27ac235 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -517,8 +517,6 @@ pub fn NewRequestContextStackAllocator(comptime RequestContext: type, comptime c buf_align: u29, return_address: usize, ) void { - _ = buf_align; - _ = return_address; const bytes = std.mem.asBytes(&self.buf); if (sliceContainsSlice(bytes, buf)) { const index = if (bytes[0..buf.len].ptr != buf.ptr) diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index 96e3130f3ff0c9..f7085366f97b5a 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -1373,7 +1373,6 @@ pub fn NewClassWithInstanceType( \\}} // namespace Bun \\ ; - _ = header_file; const Opts = struct { name: string }; try writer.print(header_file, Opts{ .name = std.mem.span(name), diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 0c99a949acf950..ea760234a41fbf 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -779,7 +779,6 @@ pub const Poller = struct { } pub fn onTick(loop: *uws.Loop, tagged_pointer: ?*anyopaque) callconv(.C) void { - _ = loop; _ = tagged_pointer; if (comptime Environment.isMac) dispatchKQueueEvent(loop, &loop.ready_polls[@intCast(usize, loop.current_ready_poll)]) diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 669537184da0b0..3a82c4470fcabf 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -2386,7 +2386,6 @@ pub const NodeFS = struct { return Maybe(Return.AppendFile).success; }, else => { - _ = this; @compileError("Not implemented yet"); }, } @@ -2415,7 +2414,6 @@ pub const NodeFS = struct { return Maybe(Return.AppendFile).success; }, else => { - _ = this; @compileError("Not implemented yet"); }, } @@ -2654,9 +2652,7 @@ pub const NodeFS = struct { } }, else => { - _ = args; _ = this; - _ = flavor; }, } @@ -2676,9 +2672,6 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; 
return Ret.todo; } @@ -2689,9 +2682,6 @@ pub const NodeFS = struct { .sync => return Syscall.chown(path, args.uid, args.gid), else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Chown).todo; } @@ -2706,26 +2696,22 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Chmod).todo; } /// This should almost never be async pub fn fchmod(this: *NodeFS, args: Arguments.FChmod, comptime flavor: Flavor) Maybe(Return.Fchmod) { + _ = this; switch (comptime flavor) { .sync => { return Syscall.fchmod(args.fd, args.mode); }, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Fchmod).todo; } pub fn fchown(this: *NodeFS, args: Arguments.Fchown, comptime flavor: Flavor) Maybe(Return.Fchown) { + _ = this; switch (comptime flavor) { .sync => { return Maybe(Return.Fchown).errnoSys(C.fchown(args.fd, args.uid, args.gid), .fchown) orelse @@ -2733,24 +2719,20 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Fchown).todo; } pub fn fdatasync(this: *NodeFS, args: Arguments.FdataSync, comptime flavor: Flavor) Maybe(Return.Fdatasync) { + _ = this; switch (comptime flavor) { .sync => return Maybe(Return.Fdatasync).errnoSys(system.fdatasync(args.fd), .fdatasync) orelse Maybe(Return.Fdatasync).success, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Fdatasync).todo; } pub fn fstat(this: *NodeFS, args: Arguments.Fstat, comptime flavor: Flavor) Maybe(Return.Fstat) { + _ = this; if (args.big_int) return Maybe(Return.Fstat).todo; switch (comptime flavor) { @@ -2763,38 +2745,32 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Fstat).todo; } pub fn fsync(this: *NodeFS, args: Arguments.Fsync, comptime flavor: Flavor) Maybe(Return.Fsync) { + _ = this; switch (comptime flavor) { .sync => return Maybe(Return.Fsync).errnoSys(system.fsync(args.fd), .fsync) orelse Maybe(Return.Fsync).success, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Fsync).todo; } pub fn ftruncate(this: *NodeFS, args: Arguments.FTruncate, comptime flavor: Flavor) Maybe(Return.Ftruncate) { + _ = this; switch (comptime flavor) { .sync => return Maybe(Return.Ftruncate).errnoSys(system.ftruncate(args.fd, args.len orelse 0), .ftruncate) orelse Maybe(Return.Ftruncate).success, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Ftruncate).todo; } pub fn futimes(this: *NodeFS, args: Arguments.Futimes, comptime flavor: Flavor) Maybe(Return.Futimes) { + _ = this; var times = [2]std.os.timespec{ .{ .tv_sec = args.mtime, @@ -2814,9 +2790,6 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Futimes).todo; } @@ -2830,9 +2803,6 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Lchmod).todo; } @@ -2846,9 +2816,6 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Lchown).todo; } pub fn link(this: *NodeFS, args: Arguments.Link, comptime flavor: Flavor) Maybe(Return.Link) { @@ -2864,12 +2831,10 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Link).todo; } pub fn lstat(this: *NodeFS, args: Arguments.Lstat, comptime flavor: Flavor) Maybe(Return.Lstat) { + _ = this; if (args.big_int) return Maybe(Return.Lstat).todo; switch (comptime flavor) { @@ -2886,9 +2851,6 @@ pub const NodeFS = struct { else => 
{}, } - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Lstat).todo; } @@ -2897,6 +2859,7 @@ pub const NodeFS = struct { } // Node doesn't absolute the path so we don't have to either fn mkdirNonRecursive(this: *NodeFS, args: Arguments.Mkdir, comptime flavor: Flavor) Maybe(Return.Mkdir) { + _ = this; switch (comptime flavor) { .sync => { const path = args.path.sliceZ(&this.sync_error_buf); @@ -2907,15 +2870,14 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Mkdir).todo; } // TODO: windows // TODO: verify this works correctly with unicode codepoints fn mkdirRecursive(this: *NodeFS, args: Arguments.Mkdir, comptime flavor: Flavor) Maybe(Return.Mkdir) { + _ = this; const Option = Maybe(Return.Mkdir); if (comptime Environment.isWindows) @compileError("This needs to be implemented on Windows."); @@ -3044,13 +3006,13 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Mkdir).todo; } pub fn mkdtemp(this: *NodeFS, args: Arguments.MkdirTemp, comptime flavor: Flavor) Maybe(Return.Mkdtemp) { + _ = this; + var prefix_buf = &this.sync_error_buf; const len = @min(args.prefix.len, prefix_buf.len - 7); if (len > 0) { @@ -3065,8 +3027,6 @@ pub const NodeFS = struct { else => |errno| return .{ .err = Syscall.Error{ .errno = @truncate(Syscall.Error.Int, @enumToInt(errno)), .syscall = .mkdtemp } }, } - _ = this; - _ = flavor; return .{ .result = PathString.init(bun.default_allocator.dupe(u8, std.mem.span(rc.?)) catch unreachable), }; @@ -3086,22 +3046,17 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Open).todo; } pub fn openDir(this: *NodeFS, args: Arguments.OpenDir, comptime flavor: Flavor) Maybe(Return.OpenDir) { - _ = args; _ = this; _ = flavor; return Maybe(Return.OpenDir).todo; } fn _read(this: *NodeFS, args: Arguments.Read, comptime flavor: Flavor) Maybe(Return.Read) { - _ = args; - _ = this; - _ = flavor; + if (Environment.allow_assert) std.debug.assert(args.position == null); switch (comptime flavor) { @@ -3171,9 +3126,7 @@ pub const NodeFS = struct { return if (args.position != null) _pwrite(this, args, flavor) else _write(this, args, flavor); } fn _write(this: *NodeFS, args: Arguments.Write, comptime flavor: Flavor) Maybe(Return.Write) { - _ = args; - _ = this; - _ = flavor; + switch (comptime flavor) { .sync => { @@ -3199,9 +3152,7 @@ pub const NodeFS = struct { } fn _pwrite(this: *NodeFS, args: Arguments.Write, comptime flavor: Flavor) Maybe(Return.Write) { - _ = args; - _ = this; - _ = flavor; + const position = args.position.?; @@ -3335,9 +3286,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Readdir).todo; } pub fn readFile(this: *NodeFS, args: Arguments.ReadFile, comptime flavor: Flavor) Maybe(Return.ReadFile) { @@ -3457,9 +3406,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.ReadFile).todo; } @@ -3515,9 +3462,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.WriteFile).todo; } @@ -3549,9 +3494,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Readlink).todo; } pub fn realpath(this: *NodeFS, args: Arguments.Realpath, comptime flavor: Flavor) Maybe(Return.Realpath) { @@ -3606,9 +3549,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return 
Maybe(Return.Realpath).todo; } pub const realpathNative = realpath; @@ -3631,9 +3572,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Rename).todo; } pub fn rmdir(this: *NodeFS, args: Arguments.RmDir, comptime flavor: Flavor) Maybe(Return.Rmdir) { @@ -3647,15 +3586,11 @@ pub const NodeFS = struct { }, else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Rmdir).todo; } pub fn rm(this: *NodeFS, args: Arguments.RmDir, comptime flavor: Flavor) Maybe(Return.Rm) { - _ = args; - _ = this; - _ = flavor; + switch (comptime flavor) { .sync => { if (comptime Environment.isMac) { @@ -3835,9 +3770,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Stat).todo; } @@ -3854,9 +3787,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Symlink).todo; } fn _truncate(this: *NodeFS, path: PathLike, len: JSC.WebCore.Blob.SizeType, comptime flavor: Flavor) Maybe(Return.Truncate) { @@ -3894,15 +3825,11 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Unlink).todo; } pub fn unwatchFile(this: *NodeFS, args: Arguments.UnwatchFile, comptime flavor: Flavor) Maybe(Return.UnwatchFile) { - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.UnwatchFile).todo; } pub fn utimes(this: *NodeFS, args: Arguments.Utimes, comptime flavor: Flavor) Maybe(Return.Utimes) { @@ -3930,9 +3857,7 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Utimes).todo; } @@ -3961,15 +3886,10 @@ pub const NodeFS = struct { else => {}, } - _ = args; - _ = this; - _ = flavor; + return Maybe(Return.Lutimes).todo; } pub fn watch(this: *NodeFS, args: Arguments.Watch, comptime flavor: Flavor) Maybe(Return.Watch) { - _ = args; - _ = this; - _ = flavor; return Maybe(Return.Watch).todo; } pub fn createReadStream(_: *NodeFS, _: Arguments.CreateReadStream, comptime _: Flavor) Maybe(Return.CreateReadStream) { diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index d4d97e9a98b5b5..54db57732369df 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -91,7 +91,6 @@ fn call(comptime Function: NodeFSFunctionEnum) NodeFSFunction { exception: JSC.C.ExceptionRef, ) JSC.C.JSValueRef { _ = this; - _ = ctx; _ = arguments; var err = JSC.SystemError{}; exception.* = err.toErrorInstance(ctx.ptr()).asObjectRef(); diff --git a/src/bun.js/unbounded_queue.zig b/src/bun.js/unbounded_queue.zig index 96323b288c63a2..64149bae6ad81d 100644 --- a/src/bun.js/unbounded_queue.zig +++ b/src/bun.js/unbounded_queue.zig @@ -102,7 +102,7 @@ pub fn UnboundedQueue(comptime T: type, comptime next_field: meta.FieldEnum(T)) batch.front = front; var next_item = @atomicLoad(?*T, &@field(front, _next), .Acquire); - while (next_item) |next_node| : (next_item = @atomicLoad(?*T, &@field(next_node, next), .Acquire)) { + while (next_item) |next_node| : (next_item = @atomicLoad(?*T, &@field(next_node, _next), .Acquire)) { batch.count += 1; batch.last = front; diff --git a/src/darwin_c.zig b/src/darwin_c.zig index b798801fe5006b..3bdb9c72491d3d 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -603,15 +603,12 @@ const IO_CTL_RELATED = struct { return _IOC(IOC_VOID, g, n, @as(c_int, 0)); } pub inline fn _IOR(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_OUT, g, n, @import("std").zig.c_translation.sizeof(t))) { - _ = t; 
return _IOC(IOC_OUT, g, n, @import("std").zig.c_translation.sizeof(t)); } pub inline fn _IOW(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_IN, g, n, @import("std").zig.c_translation.sizeof(t))) { - _ = t; return _IOC(IOC_IN, g, n, @import("std").zig.c_translation.sizeof(t)); } pub inline fn _IOWR(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_INOUT, g, n, @import("std").zig.c_translation.sizeof(t))) { - _ = t; return _IOC(IOC_INOUT, g, n, @import("std").zig.c_translation.sizeof(t)); } pub const TIOCMODG = _IOR('t', @as(c_int, 3), c_int); From 1818662fcb3fee93c01d38cf1b6488fe177f3125 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:38:19 +0100 Subject: [PATCH 23/51] Fix: undo mistake --- src/install/bit_set.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/install/bit_set.zig b/src/install/bit_set.zig index bb23b4e01dd446..1e2501ba52829c 100644 --- a/src/install/bit_set.zig +++ b/src/install/bit_set.zig @@ -929,7 +929,7 @@ pub const IteratorOptions = struct { // The iterator is reusable between several bit set types fn BitSetIterator(comptime MaskInt: type, comptime options: IteratorOptions) type { - //const ShiftInt = std.math.Log2Int(MaskInt); + const ShiftInt = std.math.Log2Int(MaskInt); const kind = options.kind; const direction = options.direction; return struct { From 58e73e13fc2e309cf567f5bb487eaa842fa7ed05 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:40:10 +0100 Subject: [PATCH 24/51] Fix: function type cannot have a name --- src/panic_handler.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/panic_handler.zig b/src/panic_handler.zig index 2e2b2b45149f05..f07d2f6bb0d669 100644 --- a/src/panic_handler.zig +++ b/src/panic_handler.zig @@ -16,7 +16,7 @@ const Features = @import("./analytics/analytics_thread.zig").Features; const HTTP = @import("http").AsyncHTTP; const Report = @import("./report.zig"); -pub fn NewPanicHandler(comptime panic_func: fn handle_panic(msg: []const u8, error_return_type: ?*std.builtin.StackTrace) noreturn) type { +pub fn NewPanicHandler(comptime panic_func: fn (msg: []const u8, error_return_type: ?*std.builtin.StackTrace) noreturn) type { return struct { panic_count: usize = 0, skip_next_panic: bool = false, From 379e1189e378a675b6338ff8c0cd92db97556195 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:45:06 +0100 Subject: [PATCH 25/51] Fix: unused stuff --- src/bun.js/node/node_fs.zig | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 3a82c4470fcabf..4132b4348c64cc 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -2834,7 +2834,6 @@ pub const NodeFS = struct { return Maybe(Return.Link).todo; } pub fn lstat(this: *NodeFS, args: Arguments.Lstat, comptime flavor: Flavor) Maybe(Return.Lstat) { - _ = this; if (args.big_int) return Maybe(Return.Lstat).todo; switch (comptime flavor) { @@ -2859,7 +2858,6 @@ pub const NodeFS = struct { } // Node doesn't absolute the path so we don't have to either fn mkdirNonRecursive(this: *NodeFS, args: Arguments.Mkdir, comptime flavor: Flavor) Maybe(Return.Mkdir) { - _ = this; switch (comptime flavor) { .sync => { const path = args.path.sliceZ(&this.sync_error_buf); @@ -2877,7 +2875,6 @@ pub const NodeFS = struct { // TODO: windows // TODO: verify this works correctly with unicode codepoints fn mkdirRecursive(this: *NodeFS, args: Arguments.Mkdir, comptime flavor: 
Flavor) Maybe(Return.Mkdir) {
-        _ = this;
         const Option = Maybe(Return.Mkdir);
         if (comptime Environment.isWindows) @compileError("This needs to be implemented on Windows.");
@@ -3011,7 +3008,7 @@ pub const NodeFS = struct {
     }
     pub fn mkdtemp(this: *NodeFS, args: Arguments.MkdirTemp, comptime flavor: Flavor) Maybe(Return.Mkdtemp) {
-        _ = this;
+        _ = flavor;
         var prefix_buf = &this.sync_error_buf;
         const len = @min(args.prefix.len, prefix_buf.len - 7);
@@ -3051,11 +3048,13 @@
     }
     pub fn openDir(this: *NodeFS, args: Arguments.OpenDir, comptime flavor: Flavor) Maybe(Return.OpenDir) {
         _ = this;
+        _ = args;
         _ = flavor;
         return Maybe(Return.OpenDir).todo;
     }
     fn _read(this: *NodeFS, args: Arguments.Read, comptime flavor: Flavor) Maybe(Return.Read) {
+        _ = this;
         if (Environment.allow_assert) std.debug.assert(args.position == null);
@@ -3126,7 +3125,7 @@
         return if (args.position != null) _pwrite(this, args, flavor) else _write(this, args, flavor);
     }
     fn _write(this: *NodeFS, args: Arguments.Write, comptime flavor: Flavor) Maybe(Return.Write) {
-
+        _ = this;
         switch (comptime flavor) {
             .sync => {
@@ -3152,7 +3151,7 @@
     }
     fn _pwrite(this: *NodeFS, args: Arguments.Write, comptime flavor: Flavor) Maybe(Return.Write) {
-
+        _ = this;
         const position = args.position.?;
@@ -3799,8 +3798,6 @@
             else => {},
         }
-        _ = this;
-        _ = flavor;
         return Maybe(Return.Truncate).todo;
     }
     pub fn truncate(this: *NodeFS, args: Arguments.Truncate, comptime flavor: Flavor) Maybe(Return.Truncate) {
@@ -3829,7 +3826,9 @@
         return Maybe(Return.Unlink).todo;
     }
     pub fn unwatchFile(this: *NodeFS, args: Arguments.UnwatchFile, comptime flavor: Flavor) Maybe(Return.UnwatchFile) {
-
+        _ = this;
+        _ = args;
+        _ = flavor;
         return Maybe(Return.UnwatchFile).todo;
     }
     pub fn utimes(this: *NodeFS, args: Arguments.Utimes, comptime flavor: Flavor) Maybe(Return.Utimes) {
@@ -3890,12 +3889,21 @@
         return Maybe(Return.Lutimes).todo;
     }
     pub fn watch(this: *NodeFS, args: Arguments.Watch, comptime flavor: Flavor) Maybe(Return.Watch) {
+        _ = this;
+        _ = args;
+        _ = flavor;
         return Maybe(Return.Watch).todo;
     }
     pub fn createReadStream(_: *NodeFS, _: Arguments.CreateReadStream, comptime _: Flavor) Maybe(Return.CreateReadStream) {
+        _ = this;
+        _ = args;
+        _ = flavor;
         return Maybe(Return.CreateReadStream).todo;
     }
     pub fn createWriteStream(_: *NodeFS, _: Arguments.CreateWriteStream, comptime _: Flavor) Maybe(Return.CreateWriteStream) {
+        _ = this;
+        _ = args;
+        _ = flavor;
         return Maybe(Return.CreateWriteStream).todo;
     }
 };

From 0cf1a498a3590d7db70d2fb0c9cc65f0b683f9cf Mon Sep 17 00:00:00 2001
From: Vaughan Rouesnel
Date: Sat, 12 Nov 2022 01:45:27 +0100
Subject: [PATCH 26/51] Fix: addIncludeDir -> addIncludePath

---
 build.zig | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/build.zig b/build.zig
index 40f770e0bff1a3..6b46ca40afa389 100644
--- a/build.zig
+++ b/build.zig
@@ -6,13 +6,13 @@ fn pkgPath(comptime out: []const u8) std.build.FileSource {
     return .{ .path = outpath };
 }
 pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void {
-    step.addIncludeDir("src/deps");
+    step.addIncludePath("src/deps");
     if (with_obj) {
         step.addObjectFile("src/deps/picohttpparser.o");
     }
-    step.addIncludeDir("src/deps");
+    step.addIncludePath("src/deps");
     if (with_obj) {
         step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
@@ -595,7 +595,7 @@ pub fn linkObjectFiles(b:
*std.build.Builder, obj: *std.build.LibExeObjStep, tar for (dirs_to_search.slice()) |deps_path| { var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch continue; var iterator = deps_dir.iterate(); - obj.addIncludeDir(deps_path); + obj.addIncludePath(deps_path); obj.addLibPath(deps_path); while (iterator.next() catch null) |entr| { From e1d19a7ee21ea2aee0cef5ac0aac900222b93e02 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:48:59 +0100 Subject: [PATCH 27/51] Fix: openDirIterable --- build.zig | 2 +- src/bun.js/node/node_fs.zig | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/build.zig b/build.zig index 6b46ca40afa389..4e2e630d4f0ea7 100644 --- a/build.zig +++ b/build.zig @@ -593,7 +593,7 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar }); for (dirs_to_search.slice()) |deps_path| { - var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch continue; + var deps_dir = std.fs.cwd().openIterableDir(deps_path, .{}) catch continue; var iterator = deps_dir.iterate(); obj.addIncludePath(deps_path); obj.addLibPath(deps_path); diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 4132b4348c64cc..c28c460941001f 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -3895,15 +3895,9 @@ pub const NodeFS = struct { return Maybe(Return.Watch).todo; } pub fn createReadStream(_: *NodeFS, _: Arguments.CreateReadStream, comptime _: Flavor) Maybe(Return.CreateReadStream) { - _ = this; - _ = args; - _ = flavor; return Maybe(Return.CreateReadStream).todo; } pub fn createWriteStream(_: *NodeFS, _: Arguments.CreateWriteStream, comptime _: Flavor) Maybe(Return.CreateWriteStream) { - _ = this; - _ = args; - _ = flavor; return Maybe(Return.CreateWriteStream).todo; } }; From 8029ee65937343045c1091f0022521986444c36f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:51:51 +0100 Subject: [PATCH 28/51] Fix: addLibPath -> addLibraryPath --- build.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.zig b/build.zig index 4e2e630d4f0ea7..28ffc65e72af41 100644 --- a/build.zig +++ b/build.zig @@ -596,7 +596,7 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar var deps_dir = std.fs.cwd().openIterableDir(deps_path, .{}) catch continue; var iterator = deps_dir.iterate(); obj.addIncludePath(deps_path); - obj.addLibPath(deps_path); + obj.addLibraryPath(deps_path); while (iterator.next() catch null) |entr| { const entry: std.fs.Dir.Entry = entr; From 1e7e97100bb74bbd1686116f31b61f55ee8813f7 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:52:14 +0100 Subject: [PATCH 29/51] Fix: Dir.Entry -> IterableDir.Entry --- build.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.zig b/build.zig index 28ffc65e72af41..b20e51a8f2f82e 100644 --- a/build.zig +++ b/build.zig @@ -599,7 +599,7 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar obj.addLibraryPath(deps_path); while (iterator.next() catch null) |entr| { - const entry: std.fs.Dir.Entry = entr; + const entry: std.fs.IterableDir.Entry = entr; if (files_we_care_about.get(entry.name)) |obj_name| { var has_added = try added.getOrPut(std.hash.Wyhash.hash(0, obj_name)); if (!has_added.found_existing) { From ffcdad0d5611b4b45da1d8be95a0c5db679b572e Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:55:27 +0100 Subject: [PATCH 30/51] Fix: leading 
zeroes --- src/deps/zig-datetime/src/datetime.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/deps/zig-datetime/src/datetime.zig b/src/deps/zig-datetime/src/datetime.zig index 6e21190d9c00d2..578356fa815ec4 100644 --- a/src/deps/zig-datetime/src/datetime.zig +++ b/src/deps/zig-datetime/src/datetime.zig @@ -1022,7 +1022,7 @@ test "time-copy" { test "time-compare" { var t1 = try Time.create(8, 30, 0, 0); var t2 = try Time.create(9, 30, 0, 0); - var t3 = try Time.create(8, 00, 0, 0); + var t3 = try Time.create(8, 0, 0, 0); var t4 = try Time.create(9, 30, 17, 0); try testing.expect(t1.lt(t2)); From c020fe5a46cc5454f0b36982bd3f0fa68095c9fa Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 01:55:35 +0100 Subject: [PATCH 31/51] Fix: Shadowing --- src/deps/uws.zig | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/deps/uws.zig b/src/deps/uws.zig index 1551efba70c5bc..eb3c915e5f426f 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -465,17 +465,17 @@ pub const Loop = extern struct { uws_loop_defer(this, user_data, Handler.callback); } - fn NewHandler(comptime UserType: type, comptime callback: fn (UserType) void) type { + fn NewHandler(comptime UserType: type, comptime _callback: fn (UserType) void) type { return struct { loop: *Loop, pub fn removePost(handler: @This()) void { - return uws_loop_removePostHandler(handler.loop, callback); + return uws_loop_removePostHandler(handler.loop, _callback); } pub fn removePre(handler: @This()) void { - return uws_loop_removePostHandler(handler.loop, callback); + return uws_loop_removePostHandler(handler.loop, _callback); } pub fn callback(data: *anyopaque, _: *Loop) callconv(.C) void { - callback(@ptrCast(UserType, @alignCast(@alignOf(std.meta.Child(UserType)), data))); + _callback(@ptrCast(UserType, @alignCast(@alignOf(std.meta.Child(UserType)), data))); } }; } @@ -1139,13 +1139,13 @@ pub fn NewApp(comptime ssl: bool) type { port: i32, comptime UserData: type, user_data: UserData, - comptime handler: fn (UserData, ?*ThisApp.ListenSocket, uws_app_listen_config_t) void, + comptime _handler: fn (UserData, ?*ThisApp.ListenSocket, uws_app_listen_config_t) void, ) void { if (comptime is_bindgen) { unreachable; } const Wrapper = struct { - const handler = handler; + const handler = _handler; pub fn handle(socket: ?*uws.ListenSocket, conf: uws_app_listen_config_t, data: ?*anyopaque) callconv(.C) void { if (comptime UserData == void) { @call(.{ .modifier = .always_inline }, handler, .{ void{}, @ptrCast(?*ThisApp.ListenSocket, socket), conf }); @@ -1165,11 +1165,11 @@ pub fn NewApp(comptime ssl: bool) type { app: *ThisApp, comptime UserData: type, user_data: UserData, - comptime handler: fn (UserData, ?*ThisApp.ListenSocket) void, + comptime _handler: fn (UserData, ?*ThisApp.ListenSocket) void, config: uws_app_listen_config_t, ) void { const Wrapper = struct { - const handler = handler; + const handler = _handler; pub fn handle(socket: ?*uws.ListenSocket, data: ?*anyopaque) callconv(.C) void { if (comptime UserData == void) { From e0d9423cda87fd9c06e64a43b615d720a9e32a50 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 02:03:32 +0100 Subject: [PATCH 32/51] Fix: Ambiguous references --- src/bun.js/base.zig | 8 ++++++-- src/bun.js/bindings/bindings.zig | 10 +++++++++- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index f7085366f97b5a..fe64b7ecb0e998 100644 --- a/src/bun.js/base.zig +++ 
b/src/bun.js/base.zig @@ -1,3 +1,9 @@ +// TODO(vjpr): Ambiguous references +// +// const C = bun.C; +// const WebCore = @import("./webcore.zig"); +// + pub const js = @import("../jsc.zig").C; const std = @import("std"); const bun = @import("../global.zig"); @@ -9,12 +15,10 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const JavaScript = @import("./javascript.zig"); const ResolveError = JavaScript.ResolveError; const BuildError = JavaScript.BuildError; const JSC = @import("../jsc.zig"); -const WebCore = @import("./webcore.zig"); const Test = @import("./test/jest.zig"); const Fetch = WebCore.Fetch; const Response = WebCore.Response; diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 9240ebf2599347..8674ba00042d8e 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -1,3 +1,12 @@ +// TODO(vjpr): +// Commented out because of `ambiguous reference` error. +// +// /Users/Vaughan/dev/fork/+bun/bun/src/jsc.zig:2:5: note: declared here +// pub const is_bindgen = @import("std").meta.globalOption("bindgen", bool) orelse false; +// +// const is_bindgen: bool = std.meta.globalOption("bindgen", bool) orelse false; +// + const std = @import("std"); const bun = @import("../../global.zig"); const string = bun.string; @@ -11,7 +20,6 @@ const ErrorableZigString = Exports.ErrorableZigString; const ErrorableResolvedSource = Exports.ErrorableResolvedSource; const ZigException = Exports.ZigException; const ZigStackTrace = Exports.ZigStackTrace; -const is_bindgen: bool = std.meta.globalOption("bindgen", bool) orelse false; const ArrayBuffer = @import("../base.zig").ArrayBuffer; const JSC = @import("../../jsc.zig"); const Shimmer = JSC.Shimmer; From ca05e1912a8acaef1019cc6cced85294b337da1b Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 03:06:43 +0100 Subject: [PATCH 33/51] Fix: empty test name --- src/main.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main.zig b/src/main.zig index fe5fc723469898..22ba7e97def640 100644 --- a/src/main.zig +++ b/src/main.zig @@ -62,7 +62,7 @@ pub fn main() void { pub const JavaScriptVirtualMachine = JavaScript.VirtualMachine; -test "" { +test { @import("std").testing.refAllDecls(@This()); std.mem.doNotOptimizeAway(JavaScriptVirtualMachine.fetch); From ab9c164f51119d4de88641ba35c1e24badabe88d Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 03:12:31 +0100 Subject: [PATCH 34/51] Fix: ambiguous errors --- src/bun.js/api/bun.zig | 6 +-- src/bun.js/base.zig | 66 +++++++++++++----------- src/bun.js/bindings/bindings.zig | 9 +--- src/bun.js/javascript.zig | 86 ++++++++++++++++++-------------- 4 files changed, 89 insertions(+), 78 deletions(-) diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index 4d6cb9f79ea8ec..f8f7b55a886598 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -969,7 +969,7 @@ fn doResolveWithArgs( } if (!errorable.success) { - exception.* = bun.cast(JSC.JSValueRef, errorable.result.err.ptr.?); + exception.* = bun.cast(js.JSValueRef, errorable.result.err.ptr.?); return null; } @@ -1009,7 +1009,7 @@ export fn Bun__resolve( specifier: JSValue, source: JSValue, ) JSC.JSValue { - var exception_ = [1]JSC.JSValueRef{null}; + var exception_ = [1]js.JSValueRef{null}; var exception = &exception_; const value = doResolveWithArgs(global, specifier.getZigString(global), 
source.getZigString(global), exception, true) orelse { return JSC.JSPromise.rejectedPromiseValue(global, JSC.JSValue.fromRef(exception[0])); @@ -1022,7 +1022,7 @@ export fn Bun__resolveSync( specifier: JSValue, source: JSValue, ) JSC.JSValue { - var exception_ = [1]JSC.JSValueRef{null}; + var exception_ = [1]js.JSValueRef{null}; var exception = &exception_; return doResolveWithArgs(global, specifier.getZigString(global), source.getZigString(global), exception, true) orelse { return JSC.JSValue.fromRef(exception[0]); diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index fe64b7ecb0e998..fc9df0b8a44b38 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -1,8 +1,7 @@ -// TODO(vjpr): Ambiguous references -// +// Commented because of `ambiguous references` error. // const C = bun.C; -// const WebCore = @import("./webcore.zig"); -// + +const WebCore = @import("./webcore.zig"); pub const js = @import("../jsc.zig").C; const std = @import("std"); @@ -844,7 +843,7 @@ pub fn NewConstructor( pub usingnamespace NewClassWithInstanceType(void, InstanceType.Class.class_options, staticFunctions, properties, InstanceType); const name_string = &ZigString.init(InstanceType.Class.class_options.name); pub fn constructor(ctx: js.JSContextRef) callconv(.C) js.JSObjectRef { - return JSValue.makeWithNameAndPrototype( + return JSC.JSValue.makeWithNameAndPrototype( ctx.ptr(), @This().get().*, InstanceType.Class.get().*, @@ -986,7 +985,7 @@ pub fn NewClassWithInstanceType( return result; } - pub fn putDOMCalls(globalThis: *JSC.JSGlobalObject, value: JSValue) void { + pub fn putDOMCalls(globalThis: *JSC.JSGlobalObject, value: JSC.JSValue) void { inline for (function_name_literals) |functionName| { const Function = comptime @field(staticFunctions, functionName); if (@TypeOf(Function) == type and @hasDecl(Function, "is_dom_call")) { @@ -2284,7 +2283,16 @@ pub fn NewClassWithInstanceType( // }; // } -const JSValue = JSC.JSValue; +// Removed because it would create ambiguous reference. 
+// +// base.zig: const JSValue = JSC.JSValue; +// base.zig: const JSC = @import("../jsc.zig"); +// jsc.zig: pub usingnamespace @import("./bun.js/bindings/bindings.zig"); +// bindings.zig: pub const JSValue +// jsc.zig: pub usingnamespace @import("./bun.js/bindings/base.zig"); +// +// const JSValue = JSC.JSValue; + const ZigString = JSC.ZigString; pub const PathString = bun.PathString; @@ -2410,7 +2418,7 @@ pub const ArrayBuffer = extern struct { return Stream{ .pos = 0, .buf = this.slice() }; } - pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSValue { + pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSC.JSValue { JSC.markBinding(@src()); return switch (comptime kind) { .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len), @@ -2419,8 +2427,8 @@ pub const ArrayBuffer = extern struct { }; } - extern "C" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSValue; - extern "C" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSValue; + extern "C" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSC.JSValue; + extern "C" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSC.JSValue; pub fn fromTypedArray(ctx: JSC.C.JSContextRef, value: JSC.JSValue, _: JSC.C.ExceptionRef) ArrayBuffer { var out = std.mem.zeroes(ArrayBuffer); @@ -2636,7 +2644,7 @@ pub const MarkedArrayBuffer = struct { } pub fn toNodeBuffer(this: MarkedArrayBuffer, ctx: js.JSContextRef) js.JSObjectRef { - return JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator).asObjectRef(); + return JSC.JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator).asObjectRef(); } pub fn toJSObjectRef(this: MarkedArrayBuffer, ctx: js.JSContextRef, exception: js.ExceptionRef) js.JSObjectRef { @@ -2685,7 +2693,7 @@ pub const RefString = struct { pub const Hash = u32; pub const Map = std.HashMap(Hash, *JSC.RefString, IdentityContext(Hash), 80); - pub fn toJS(this: *RefString, global: *JSC.JSGlobalObject) JSValue { + pub fn toJS(this: *RefString, global: *JSC.JSGlobalObject) JSC.JSValue { return JSC.ZigString.init(this.slice()).external(global, this, RefString__external); } @@ -2765,7 +2773,7 @@ pub const ExternalBuffer = struct { } pub fn toArrayBuffer(this: *ExternalBuffer, ctx: *JSC.JSGlobalObject) JSC.JSValue { - return JSValue.c(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy(ctx.ref(), this.buf.ptr, this.buf.len, ExternalBuffer_deallocator, this, null)); + return JSC.JSValue.c(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy(ctx.ref(), this.buf.ptr, this.buf.len, ExternalBuffer_deallocator, this, null)); } }; pub export fn ExternalBuffer_deallocator(bytes_: *anyopaque, ctx: *anyopaque) callconv(.C) void { @@ -2897,7 +2905,7 @@ pub fn getterWrap(comptime Container: type, comptime name: string) GetterType(Co _: js.JSStringRef, exception: js.ExceptionRef, ) js.JSObjectRef { - const result: JSValue = if (comptime std.meta.fields(ArgsTuple).len == 1) + const result: JSC.JSValue = if (comptime std.meta.fields(ArgsTuple).len == 1) @call(.{}, @field(Container, name), .{ this, }) @@ -3126,7 +3134,7 @@ pub fn DOMCall( thisValue: JSC.JSValue, arguments_ptr: [*]const JSC.JSValue, arguments_len: usize, - ) callconv(.C) JSValue { + ) callconv(.C) JSC.JSValue { return @call(.{}, 
@field(Container, functionName), .{ globalObject, thisValue, @@ -3143,7 +3151,7 @@ pub fn DOMCall( .@"fastpath" = fastpath, }); - pub fn put(globalObject: *JSC.JSGlobalObject, value: JSValue) void { + pub fn put(globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { shim.cppFn("put", .{ globalObject, value }); } @@ -3486,7 +3494,7 @@ pub fn wrapWithHasContainer( }, js.JSObjectRef => { args[i] = thisObject; - if (!JSValue.fromRef(thisObject).isCell() or !JSValue.fromRef(thisObject).isObject()) { + if (!JSC.JSValue.fromRef(thisObject).isCell() or !JSC.JSValue.fromRef(thisObject).isObject()) { JSC.throwInvalidArguments("Expected object", .{}, ctx, exception); iter.deinit(); return null; @@ -3495,7 +3503,7 @@ pub fn wrapWithHasContainer( js.ExceptionRef => { args[i] = exception; }, - JSValue => { + JSC.JSValue => { const val = eater(&iter) orelse { JSC.throwInvalidArguments("Missing argument", .{}, ctx, exception); iter.deinit(); @@ -3503,14 +3511,14 @@ pub fn wrapWithHasContainer( }; args[i] = val; }, - ?JSValue => { + ?JSC.JSValue => { args[i] = eater(&iter); }, else => @compileError("Unexpected Type " ++ @typeName(ArgType)), } } - var result: JSValue = @call(.{}, @field(Container, name), args); + var result: JSC.JSValue = @call(.{}, @field(Container, name), args); if (!result.isEmptyOrUndefinedOrNull() and result.isError()) { exception.* = result.asObjectRef(); iter.deinit(); @@ -3680,7 +3688,7 @@ pub fn wrapInstanceMethod( return JSC.JSValue.zero; }; }, - JSValue => { + JSC.JSValue => { const val = eater(&iter) orelse { globalThis.throwInvalidArguments("Missing argument", .{}); iter.deinit(); @@ -3688,7 +3696,7 @@ pub fn wrapInstanceMethod( }; args[i] = val; }, - ?JSValue => { + ?JSC.JSValue => { args[i] = eater(&iter); }, else => @compileError("Unexpected Type " ++ @typeName(ArgType)), @@ -3822,7 +3830,7 @@ pub fn wrapStaticMethod( return JSC.JSValue.zero; }; }, - JSValue => { + JSC.JSValue => { const val = eater(&iter) orelse { globalThis.throwInvalidArguments("Missing argument", .{}); iter.deinit(); @@ -3830,7 +3838,7 @@ pub fn wrapStaticMethod( }; args[i] = val; }, - ?JSValue => { + ?JSC.JSValue => { args[i] = eater(&iter); }, else => @compileError("Unexpected Type " ++ @typeName(ArgType)), @@ -3883,7 +3891,7 @@ pub fn cachedBoundFunction(comptime name: [:0]const u8, comptime callback: anyty if (existing.isEmpty()) { return ctx.ptr().putCachedObject( &ZigString.init(name_slice), - JSValue.fromRef(JSC.C.JSObjectMakeFunctionWithCallback(ctx, JSC.C.JSStringCreateStatic(name_slice.ptr, name_slice.len), call)), + JSC.JSValue.fromRef(JSC.C.JSObjectMakeFunctionWithCallback(ctx, JSC.C.JSStringCreateStatic(name_slice.ptr, name_slice.len), call)), ).asObjectRef(); } @@ -3996,7 +4004,7 @@ pub const Strong = extern struct { return str; } - pub fn get(this: *Strong) ?JSValue { + pub fn get(this: *Strong) ?JSC.JSValue { var ref = this.ref orelse return null; const result = ref.get(); if (result == .zero) { @@ -4006,7 +4014,7 @@ pub const Strong = extern struct { return result; } - pub fn swap(this: *Strong) JSValue { + pub fn swap(this: *Strong) JSC.JSValue { var ref = this.ref orelse return .zero; const result = ref.get(); if (result == .zero) { @@ -4022,7 +4030,7 @@ pub const Strong = extern struct { return ref.get() != .zero; } - pub fn trySwap(this: *Strong) ?JSValue { + pub fn trySwap(this: *Strong) ?JSC.JSValue { const result = this.swap(); if (result == .zero) { return null; @@ -4031,7 +4039,7 @@ pub const Strong = extern struct { return result; } - pub fn set(this: *Strong, 
globalThis: *JSC.JSGlobalObject, value: JSValue) void { + pub fn set(this: *Strong, globalThis: *JSC.JSGlobalObject, value: JSC.JSValue) void { var ref: *JSC.napi.Ref = this.ref orelse { if (value == .zero) return; this.ref = JSC.napi.Ref.create(globalThis, value); diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 8674ba00042d8e..bed6697c71eeeb 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -1,11 +1,4 @@ -// TODO(vjpr): -// Commented out because of `ambiguous reference` error. -// -// /Users/Vaughan/dev/fork/+bun/bun/src/jsc.zig:2:5: note: declared here -// pub const is_bindgen = @import("std").meta.globalOption("bindgen", bool) orelse false; -// -// const is_bindgen: bool = std.meta.globalOption("bindgen", bool) orelse false; -// +const is_bindgen: bool = std.meta.globalOption("bindgen", bool) orelse false; const std = @import("std"); const bun = @import("../../global.zig"); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index bb0e8d7b88a635..2478159fd3eacd 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -13,7 +13,7 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const Arena = @import("../mimalloc_arena.zig").Arena; -const C = bun.C; +// const C = bun.C; const NetworkThread = @import("http").NetworkThread; const IO = @import("io"); const Allocator = std.mem.Allocator; @@ -57,7 +57,6 @@ const JSError = @import("./base.zig").JSError; const d = @import("./base.zig").d; const MarkedArrayBuffer = @import("./base.zig").MarkedArrayBuffer; const getAllocator = @import("./base.zig").getAllocator; -const JSValue = @import("../jsc.zig").JSValue; const NewClass = @import("./base.zig").NewClass; const Microtask = @import("../jsc.zig").Microtask; const JSGlobalObject = @import("../jsc.zig").JSGlobalObject; @@ -69,7 +68,10 @@ const ZigException = @import("../jsc.zig").ZigException; const ZigStackTrace = @import("../jsc.zig").ZigStackTrace; const ErrorableResolvedSource = @import("../jsc.zig").ErrorableResolvedSource; const ResolvedSource = @import("../jsc.zig").ResolvedSource; -const JSPromise = @import("../jsc.zig").JSPromise; +// Exported by JSC. +// const JSPromise = @import("../jsc.zig").JSPromise; +// const JSValue = @import("../jsc.zig").JSValue; +// -- const JSInternalPromise = @import("../jsc.zig").JSInternalPromise; const JSModuleLoader = @import("../jsc.zig").JSModuleLoader; const JSPromiseRejectionOperation = @import("../jsc.zig").JSPromiseRejectionOperation; @@ -271,20 +273,28 @@ export fn Bun__readOriginTimerStart(vm: *JSC.VirtualMachine) f64 { return @floatCast(f64, (@intToFloat(f128, vm.origin_timestamp) + JSC.VirtualMachine.origin_relative_epoch) / 1_000_000.0); } -comptime { - if (!JSC.is_bindgen) { - _ = Bun__getDefaultGlobal; - _ = Bun__getVM; - _ = Bun__drainMicrotasks; - _ = Bun__queueTask; - _ = Bun__queueTaskConcurrently; - _ = Bun__handleRejectedPromise; - _ = Bun__readOriginTimer; - _ = Bun__onDidAppendPlugin; - _ = Bun__readOriginTimerStart; - _ = Bun__reportUnhandledError; - } -} +//////////////////////////////////////////////////////////////////////////////// +// +// TODO(vjpr): What was this for? Just to remove errors? 
+// +// It was causing `parameter of type '*src.bun.js.javascript.VirtualMachine' must be declared comptime` in `export fn Bun__readOriginTimer(vm: *JSC.VirtualMachine) u64 {` +// +// comptime { +// if (!JSC.is_bindgen) { +// _ = Bun__getDefaultGlobal; +// _ = Bun__getVM; +// _ = Bun__drainMicrotasks; +// _ = Bun__queueTask; +// _ = Bun__queueTaskConcurrently; +// _ = Bun__handleRejectedPromise; +// _ = Bun__readOriginTimer; +// _ = Bun__onDidAppendPlugin; +// _ = Bun__readOriginTimerStart; +// _ = Bun__reportUnhandledError; +// } +// } +// +//////////////////////////////////////////////////////////////////////////////// /// This function is called on the main thread /// The bunVM() call will assert this @@ -292,7 +302,7 @@ pub export fn Bun__queueTask(global: *JSGlobalObject, task: *JSC.CppTask) void { global.bunVM().eventLoop().enqueueTask(Task.init(task)); } -pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JSValue) callconv(.C) JSValue { +pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JSC.JSValue) callconv(.C) JSC.JSValue { var jsc_vm = globalObject.bunVM(); jsc_vm.onUnhandledError(globalObject, value); return JSC.JSValue.jsUndefined(); @@ -362,7 +372,7 @@ pub const VirtualMachine = struct { plugin_runner: ?PluginRunner = null, is_main_thread: bool = false, - last_reported_error_for_dedupe: JSValue = .zero, + last_reported_error_for_dedupe: JSC.JSValue = .zero, /// Do not access this field directly /// It exists in the VirtualMachine struct so that @@ -1218,14 +1228,14 @@ pub const VirtualMachine = struct { } } - pub fn runErrorHandlerWithDedupe(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { + pub fn runErrorHandlerWithDedupe(this: *VirtualMachine, result: JSC.JSValue, exception_list: ?*ExceptionList) void { if (this.last_reported_error_for_dedupe == result and !this.last_reported_error_for_dedupe.isEmptyOrUndefinedOrNull()) return; this.runErrorHandler(result, exception_list); } - pub fn runErrorHandler(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { + pub fn runErrorHandler(this: *VirtualMachine, result: JSC.JSValue, exception_list: ?*ExceptionList) void { if (!result.isEmptyOrUndefinedOrNull()) this.last_reported_error_for_dedupe = result; @@ -1365,7 +1375,7 @@ pub const VirtualMachine = struct { const errors_property = ZigString.init("errors"); pub fn printErrorlikeObject( this: *VirtualMachine, - value: JSValue, + value: JSC.JSValue, exception: ?*Exception, exception_list: ?*ExceptionList, comptime Writer: type, @@ -1404,13 +1414,13 @@ pub const VirtualMachine = struct { writer: Writer, current_exception_list: ?*ExceptionList = null, - pub fn iteratorWithColor(_vm: [*c]VM, globalObject: [*c]JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { + pub fn iteratorWithColor(_vm: [*c]VM, globalObject: [*c]JSGlobalObject, ctx: ?*anyopaque, nextValue: JSC.JSValue) callconv(.C) void { iterator(_vm, globalObject, nextValue, ctx.?, true); } - pub fn iteratorWithOutColor(_vm: [*c]VM, globalObject: [*c]JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { + pub fn iteratorWithOutColor(_vm: [*c]VM, globalObject: [*c]JSGlobalObject, ctx: ?*anyopaque, nextValue: JSC.JSValue) callconv(.C) void { iterator(_vm, globalObject, nextValue, ctx.?, false); } - inline fn iterator(_: [*c]VM, _: [*c]JSGlobalObject, nextValue: JSValue, ctx: ?*anyopaque, comptime color: bool) void { + inline fn iterator(_: [*c]VM, _: [*c]JSGlobalObject, nextValue: 
JSC.JSValue, ctx: ?*anyopaque, comptime color: bool) void { var this_ = @intToPtr(*@This(), @ptrToInt(ctx)); VirtualMachine.vm.printErrorlikeObject(nextValue, null, this_.current_exception_list, Writer, this_.writer, color); } @@ -1492,7 +1502,7 @@ pub const VirtualMachine = struct { }, else => { this.printErrorInstance( - @intToEnum(JSValue, @bitCast(JSValue.Type, (@ptrToInt(value)))), + @intToEnum(JSC.JSValue, @bitCast(JSC.JSValue.Type, (@ptrToInt(value)))), exception_list, Writer, writer, @@ -1509,7 +1519,7 @@ pub const VirtualMachine = struct { } } - pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *JSC.Exception) JSValue { + pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *JSC.Exception) JSC.JSValue { var jsc_vm = globalObject.bunVM(); jsc_vm.onUnhandledError(globalObject, exception.value()); return JSC.JSValue.jsUndefined(); @@ -1592,7 +1602,7 @@ pub const VirtualMachine = struct { pub fn remapZigException( this: *VirtualMachine, exception: *ZigException, - error_instance: JSValue, + error_instance: JSC.JSValue, exception_list: ?*ExceptionList, ) void { error_instance.toZigException(this.global, exception); @@ -1695,7 +1705,7 @@ pub const VirtualMachine = struct { } } - pub fn printErrorInstance(this: *VirtualMachine, error_instance: JSValue, exception_list: ?*ExceptionList, comptime Writer: type, writer: Writer, comptime allow_ansi_color: bool) !void { + pub fn printErrorInstance(this: *VirtualMachine, error_instance: JSC.JSValue, exception_list: ?*ExceptionList, comptime Writer: type, writer: Writer, comptime allow_ansi_color: bool) !void { var exception_holder = ZigException.Holder.init(); var exception = exception_holder.zigException(); this.remapZigException(exception, error_instance, exception_list); @@ -1948,14 +1958,14 @@ pub const EventListenerMixin = struct { request_context: *http.RequestContext, comptime CtxType: type, ctx: *CtxType, - comptime onError: fn (ctx: *CtxType, err: anyerror, value: JSValue, request_ctx: *http.RequestContext) anyerror!void, + comptime onError: fn (ctx: *CtxType, err: anyerror, value: JSC.JSValue, request_ctx: *http.RequestContext) anyerror!void, ) !void { if (comptime JSC.is_bindgen) unreachable; - var listeners = vm.event_listeners.get(EventType.fetch) orelse (return onError(ctx, error.NoListeners, JSValue.jsUndefined(), request_context) catch {}); - if (listeners.items.len == 0) return onError(ctx, error.NoListeners, JSValue.jsUndefined(), request_context) catch {}; + var listeners = vm.event_listeners.get(EventType.fetch) orelse (return onError(ctx, error.NoListeners, JSC.JSValue.jsUndefined(), request_context) catch {}); + if (listeners.items.len == 0) return onError(ctx, error.NoListeners, JSC.JSValue.jsUndefined(), request_context) catch {}; const FetchEventRejectionHandler = struct { - pub fn onRejection(_ctx: *anyopaque, err: anyerror, fetch_event: *FetchEvent, value: JSValue) void { + pub fn onRejection(_ctx: *anyopaque, err: anyerror, fetch_event: *FetchEvent, value: JSC.JSValue) void { onError( @intToPtr(*CtxType, @ptrToInt(_ctx)), err, @@ -2005,7 +2015,7 @@ pub const EventListenerMixin = struct { } if (!request_context.has_called_done) { - onError(ctx, error.FetchHandlerRespondWithNeverCalled, JSValue.jsUndefined(), request_context) catch {}; + onError(ctx, error.FetchHandlerRespondWithNeverCalled, JSC.JSValue.jsUndefined(), request_context) catch {}; return; } } @@ -2398,22 +2408,22 @@ pub const BuildError = struct { object.put( ctx, ZigString.static("line"), - 
JSValue.jsNumber(location.line), + JSC.JSValue.jsNumber(location.line), ); object.put( ctx, ZigString.static("column"), - JSValue.jsNumber(location.column), + JSC.JSValue.jsNumber(location.column), ); object.put( ctx, ZigString.static("length"), - JSValue.jsNumber(location.length), + JSC.JSValue.jsNumber(location.length), ); object.put( ctx, ZigString.static("offset"), - JSValue.jsNumber(location.offset), + JSC.JSValue.jsNumber(location.offset), ); return object.asObjectRef(); } From e4fe2d90470a7a3102ce9607d95566107c103fbf Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 03:36:39 +0100 Subject: [PATCH 35/51] Fix: builtin.TypeInfo -> builtin.Type --- src/bun.js/base.zig | 18 ++++++++--------- src/bun.js/bindings/header-gen.zig | 28 +++++++++++++-------------- src/bun.js/bindings/shimmer.zig | 6 +++--- src/bun.js/bindings/static_export.zig | 2 +- src/bun.js/node/node_fs_binding.zig | 4 ++-- src/bun.js/node/types.zig | 4 ++-- src/install/bit_set.zig | 2 +- src/js_ast.zig | 6 +++--- src/js_lexer/identifier_cache.zig | 2 +- src/json_parser.zig | 4 ++-- src/meta.zig | 2 +- src/router.zig | 2 +- src/tagged_pointer.zig | 10 +++++----- 13 files changed, 45 insertions(+), 45 deletions(-) diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index fc9df0b8a44b38..f8769dff97ad46 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -193,9 +193,9 @@ pub const To = struct { JSC.C.JSValueRef => value, else => { - const Info: std.builtin.TypeInfo = comptime @typeInfo(Type); + const Info: std.builtin.Type = comptime @typeInfo(Type); if (comptime Info == .Enum) { - const Enum: std.builtin.TypeInfo.Enum = Info.Enum; + const Enum: std.builtin.Type.Enum = Info.Enum; if (comptime !std.meta.trait.isNumber(Enum.tag_type)) { zig_str = JSC.ZigString.init(@tagName(value)); return zig_str.toValue(context.ptr()).asObjectRef(); @@ -1998,7 +1998,7 @@ pub fn NewClassWithInstanceType( def.callAsFunction = To.JS.Callback(ZigType, staticFunctions.call.rfn).rfn; } else if (strings.eqlComptime(function_name_literal, "callAsFunction")) { const ctxfn = @field(staticFunctions, function_name_literal).rfn; - const Func: std.builtin.TypeInfo.Fn = @typeInfo(@TypeOf(ctxfn)).Fn; + const Func: std.builtin.Type.Fn = @typeInfo(@TypeOf(ctxfn)).Fn; const PointerType = std.meta.Child(Func.args[0].arg_type.?); @@ -2023,7 +2023,7 @@ pub fn NewClassWithInstanceType( @compileError("Expected " ++ options.name ++ "." 
++ function_name_literal ++ " to have .rfn"); } const ctxfn = CtxField.rfn; - const Func: std.builtin.TypeInfo.Fn = @typeInfo(@TypeOf(ctxfn)).Fn; + const Func: std.builtin.Type.Fn = @typeInfo(@TypeOf(ctxfn)).Fn; var attributes: c_uint = @enumToInt(js.JSPropertyAttributes.kJSPropertyAttributeNone); @@ -2895,7 +2895,7 @@ pub const JSPropertyNameIterator = struct { pub fn getterWrap(comptime Container: type, comptime name: string) GetterType(Container) { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.TypeInfo.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; const ArgsTuple = std.meta.ArgsTuple(FunctionType); pub fn callback( @@ -2924,7 +2924,7 @@ pub fn getterWrap(comptime Container: type, comptime name: string) GetterType(Co pub fn setterWrap(comptime Container: type, comptime name: string) SetterType(Container) { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.TypeInfo.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; pub fn callback( this: *Container, @@ -3373,7 +3373,7 @@ pub fn wrapWithHasContainer( ) MethodType(Container, has_container) { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.TypeInfo.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; const Args = std.meta.ArgsTuple(FunctionType); const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; @@ -3571,7 +3571,7 @@ pub fn wrapInstanceMethod( ) InstanceMethodType(Container) { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.TypeInfo.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; const Args = std.meta.ArgsTuple(FunctionType); const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; @@ -3717,7 +3717,7 @@ pub fn wrapStaticMethod( ) JSC.Codegen.StaticCallbackType { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.TypeInfo.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; const Args = std.meta.ArgsTuple(FunctionType); const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; diff --git a/src/bun.js/bindings/header-gen.zig b/src/bun.js/bindings/header-gen.zig index 6fb426a279ca86..d0b24c8ce33675 100644 --- a/src/bun.js/bindings/header-gen.zig +++ b/src/bun.js/bindings/header-gen.zig @@ -1,10 +1,10 @@ const std = @import("std"); const Dir = std.fs.Dir; -const FnMeta = std.builtin.TypeInfo.Fn; -const FnDecl = std.builtin.TypeInfo.Declaration.Data.FnDecl; -const StructMeta = std.builtin.TypeInfo.Struct; -const EnumMeta = std.builtin.TypeInfo.Enum; -const UnionMeta = std.builtin.TypeInfo.Union; +const FnMeta = std.builtin.Type.Fn; +const FnDecl = std.builtin.Type.Declaration.Data.FnDecl; +const StructMeta = std.builtin.Type.Struct; +const EnumMeta = std.builtin.Type.Enum; +const UnionMeta = std.builtin.Type.Union; const warn = std.debug.warn; const StaticExport = @import("./static_export.zig"); const typeBaseName = @import("../../meta.zig").typeBaseName; @@ -178,7 +178,7 @@ pub const C_Generator = 
struct { self.write(")"); defer self.write(";\n"); - // const ReturnTypeInfo: std.builtin.TypeInfo = comptime @typeInfo(func.return_type); + // const ReturnTypeInfo: std.builtin.Type = comptime @typeInfo(func.return_type); // switch (comptime ReturnTypeInfo) { // .Pointer => |Pointer| { // self.write(" __attribute__((returns_nonnull))"); @@ -193,7 +193,7 @@ pub const C_Generator = struct { comptime Function: type, comptime name: []const u8, ) void { - const func: std.builtin.TypeInfo.Fn = @typeInfo(Function).Fn; + const func: std.builtin.Type.Fn = @typeInfo(Function).Fn; self.writeType(func.return_type orelse void); self.write(" (*" ++ name ++ ")("); inline for (func.args) |arg, i| { @@ -215,7 +215,7 @@ pub const C_Generator = struct { } self.write(")"); - // const ReturnTypeInfo: std.builtin.TypeInfo = comptime @typeInfo(func.return_type); + // const ReturnTypeInfo: std.builtin.Type = comptime @typeInfo(func.return_type); // switch (comptime ReturnTypeInfo) { // .Pointer => |Pointer| { // self.write(" __attribute__((returns_nonnull))"); @@ -432,7 +432,7 @@ pub const C_Generator = struct { }; const builtin = @import("builtin"); -const TypeInfo = builtin.TypeInfo; +const TypeInfo = builtin.Type; const Declaration = TypeInfo.Declaration; const GeneratorInterface = struct { @@ -554,7 +554,7 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp _: anytype, gen: *C_Generator, comptime ParentType: type, - comptime _: std.builtin.TypeInfo.Declaration, + comptime _: std.builtin.Type.Declaration, comptime name: []const u8, comptime prefix: []const u8, ) void { @@ -722,7 +722,7 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp @setEvalBranchQuota(99999); const Type = @field(BaseType, _decls.name); if (@TypeOf(Type) == type) { - const TypeTypeInfo: std.builtin.TypeInfo = @typeInfo(@field(BaseType, _decls.name)); + const TypeTypeInfo: std.builtin.Type = @typeInfo(@field(BaseType, _decls.name)); const is_container_type = switch (TypeTypeInfo) { .Opaque, .Struct, .Enum => true, else => false, @@ -911,16 +911,16 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp generated.writer().print( \\ #include "root.h" - \\ + \\ \\ #include \\ #include "DOMJITIDLConvert.h" \\ #include "DOMJITIDLType.h" \\ #include "DOMJITIDLTypeFilter.h" \\ #include "DOMJITHelpers.h" \\ #include - \\ + \\ \\ #include "JSDOMConvertBufferSource.h" - \\ + \\ \\ using namespace JSC; \\ using namespace WebCore; \\ diff --git a/src/bun.js/bindings/shimmer.zig b/src/bun.js/bindings/shimmer.zig index f31c894cb758f0..e2c91f46dfc6ad 100644 --- a/src/bun.js/bindings/shimmer.zig +++ b/src/bun.js/bindings/shimmer.zig @@ -36,7 +36,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp // return FromType; // } - // var ReturnTypeInfo: std.builtin.TypeInfo = @typeInfo(FromType); + // var ReturnTypeInfo: std.builtin.Type = @typeInfo(FromType); // if (ReturnTypeInfo == .Pointer and NewReturnType != *anyopaque) { // NewReturnType = ReturnTypeInfo.Pointer.child; @@ -110,7 +110,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp if (@typeInfo(Function) != .Fn) { @compileError("Expected " ++ @typeName(Parent) ++ "." 
++ @typeName(Function) ++ " to be a function but received " ++ @tagName(@typeInfo(Function))); } - var Fn: std.builtin.TypeInfo.Fn = @typeInfo(Function).Fn; + var Fn: std.builtin.Type.Fn = @typeInfo(Function).Fn; if (Fn.calling_convention != .C) { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to have a C Calling Convention."); } @@ -139,7 +139,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp if (@typeInfo(Function) != .Fn) { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to be a function but received " ++ @tagName(@typeInfo(Function))); } - var Fn: std.builtin.TypeInfo.Fn = @typeInfo(Function).Fn; + var Fn: std.builtin.Type.Fn = @typeInfo(Function).Fn; if (Fn.calling_convention != .C) { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to have a C Calling Convention."); } diff --git a/src/bun.js/bindings/static_export.zig b/src/bun.js/bindings/static_export.zig index f8936f0dffa9f7..a3d47e56e25dc7 100644 --- a/src/bun.js/bindings/static_export.zig +++ b/src/bun.js/bindings/static_export.zig @@ -5,7 +5,7 @@ local_name: []const u8, Parent: type, -pub fn Decl(comptime this: *const @This()) std.builtin.TypeInfo.Declaration { +pub fn Decl(comptime this: *const @This()) std.builtin.Type.Declaration { return comptime std.meta.declarationInfo(this.Parent, this.local_name); } diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index 54db57732369df..110b3fc5a66b2f 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -26,7 +26,7 @@ fn callSync(comptime FunctionEnum: NodeFSFunctionEnum) NodeFSFunction { const Function = @field(JSC.Node.NodeFS, @tagName(FunctionEnum)); const FunctionType = @TypeOf(Function); - const function: std.builtin.TypeInfo.Fn = comptime @typeInfo(FunctionType).Fn; + const function: std.builtin.Type.Fn = comptime @typeInfo(FunctionType).Fn; comptime if (function.args.len != 3) @compileError("Expected 3 arguments"); const Arguments = comptime function.args[1].arg_type.?; const FormattedName = comptime [1]u8{std.ascii.toUpper(@tagName(FunctionEnum)[0])} ++ @tagName(FunctionEnum)[1..]; @@ -75,7 +75,7 @@ fn call(comptime Function: NodeFSFunctionEnum) NodeFSFunction { // const FunctionType = @TypeOf(Function); _ = Function; - // const function: std.builtin.TypeInfo.Fn = comptime @typeInfo(FunctionType).Fn; + // const function: std.builtin.Type.Fn = comptime @typeInfo(FunctionType).Fn; // comptime if (function.args.len != 3) @compileError("Expected 3 arguments"); // const Arguments = comptime function.args[2].arg_type orelse @compileError(std.fmt.comptimePrint("Function {s} expected to have an arg type at [2]", .{@typeName(FunctionType)})); // const Result = comptime function.return_type.?; diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 07ccc8decef225..89d466c4e0aac4 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -24,8 +24,8 @@ pub const Mode = if (Environment.isLinux) u32 else std.os.mode_t; const heap_allocator = bun.default_allocator; pub fn DeclEnum(comptime T: type) type { const fieldInfos = std.meta.declarations(T); - var enumFields: [fieldInfos.len]std.builtin.TypeInfo.EnumField = undefined; - var decls = [_]std.builtin.TypeInfo.Declaration{}; + var enumFields: [fieldInfos.len]std.builtin.Type.EnumField = undefined; + var decls = [_]std.builtin.Type.Declaration{}; inline for (fieldInfos) |field, i| { enumFields[i] 
= .{ .name = field.name, diff --git a/src/install/bit_set.zig b/src/install/bit_set.zig index 1e2501ba52829c..a5758ec912db73 100644 --- a/src/install/bit_set.zig +++ b/src/install/bit_set.zig @@ -227,7 +227,7 @@ pub fn IntegerBitSet(comptime size: u16) type { /// This set is good for sets with a larger size, but may use /// more bytes than necessary if your set is small. pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { - const mask_info: std.builtin.TypeInfo = @typeInfo(MaskIntType); + const mask_info: std.builtin.Type = @typeInfo(MaskIntType); // Make sure the mask int is indeed an int if (mask_info != .Int) @compileError("ArrayBitSet can only operate on integer masks, but was passed " ++ @typeName(MaskIntType)); diff --git a/src/js_ast.zig b/src/js_ast.zig index c0acd74da651b9..72900b8ff784ee 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -5814,7 +5814,7 @@ pub const Macro = struct { pub const ids: std.EnumArray(Tag, Expr.Data) = brk: { var list = std.EnumArray(Tag, Expr.Data).initFill(Expr.Data{ .e_number = E.Number{ .value = 0.0 } }); - const fields: []const std.builtin.TypeInfo.EnumField = @typeInfo(Tag).Enum.fields; + const fields: []const std.builtin.Type.EnumField = @typeInfo(Tag).Enum.fields; for (fields) |field| { list.set(@intToEnum(Tag, field.value), Expr.Data{ .e_number = E.Number{ .value = @intToFloat(f64, field.value) } }); } @@ -5990,7 +5990,7 @@ pub const Macro = struct { }; pub const max_tag: u8 = brk: { - const Enum: std.builtin.TypeInfo.Enum = @typeInfo(Tag).Enum; + const Enum: std.builtin.Type.Enum = @typeInfo(Tag).Enum; var max_value: u8 = 0; for (Enum.fields) |field| { max_value = std.math.max(@as(u8, field.value), max_value); @@ -5999,7 +5999,7 @@ pub const Macro = struct { }; pub const min_tag: u8 = brk: { - const Enum: std.builtin.TypeInfo.Enum = @typeInfo(Tag).Enum; + const Enum: std.builtin.Type.Enum = @typeInfo(Tag).Enum; var min: u8 = 255; for (Enum.fields) |field| { min = std.math.min(@as(u8, field.value), min); diff --git a/src/js_lexer/identifier_cache.zig b/src/js_lexer/identifier_cache.zig index 59cd9580c898ba..7f612dffe84648 100644 --- a/src/js_lexer/identifier_cache.zig +++ b/src/js_lexer/identifier_cache.zig @@ -10,7 +10,7 @@ pub const CachedBitset = extern struct { }; pub fn setMasks(masks: [*:0]const u8, comptime MaskType: type, masky: MaskType) void { - const FieldInfo: std.builtin.TypeInfo.StructField = std.meta.fieldInfo(MaskType, "masks"); + const FieldInfo: std.builtin.Type.StructField = std.meta.fieldInfo(MaskType, "masks"); masky.masks = @bitCast(masks, FieldInfo.field_type); } diff --git a/src/json_parser.zig b/src/json_parser.zig index f690344205d835..9bd7422db97105 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -524,7 +524,7 @@ pub fn toAST( comptime Type: type, value: Type, ) anyerror!js_ast.Expr { - const type_info: std.builtin.TypeInfo = @typeInfo(Type); + const type_info: std.builtin.Type = @typeInfo(Type); switch (type_info) { .Bool => { @@ -592,7 +592,7 @@ pub fn toAST( return Expr.init(js_ast.E.Array, js_ast.E.Array{ .items = exprs }, logger.Loc.Empty); }, .Struct => |Struct| { - const fields: []const std.builtin.TypeInfo.StructField = Struct.fields; + const fields: []const std.builtin.Type.StructField = Struct.fields; var properties = try allocator.alloc(js_ast.G.Property, fields.len); var property_i: usize = 0; inline for (fields) |field| { diff --git a/src/meta.zig b/src/meta.zig index c104e378d0b921..cd171cc29c80a2 100644 --- a/src/meta.zig +++ b/src/meta.zig @@ -7,7 +7,7 @@ pub fn 
ReturnOf(comptime function: anytype) type { } pub fn ReturnOfType(comptime Type: type) type { - const typeinfo: std.builtin.TypeInfo.Fn = @typeInfo(Type); + const typeinfo: std.builtin.Type.Fn = @typeInfo(Type); return typeinfo.return_type orelse void; } diff --git a/src/router.zig b/src/router.zig index fa1a6d13b275ac..acc9b8be343bfc 100644 --- a/src/router.zig +++ b/src/router.zig @@ -887,7 +887,7 @@ fn makeTest(cwd_path: string, data: anytype) !void { try cwd.setAsCwd(); const Data = @TypeOf(data); - const fields: []const std.builtin.TypeInfo.StructField = comptime std.meta.fields(Data); + const fields: []const std.builtin.Type.StructField = comptime std.meta.fields(Data); inline for (fields) |field| { @setEvalBranchQuota(9999); const value = @field(data, field.name); diff --git a/src/tagged_pointer.zig b/src/tagged_pointer.zig index dd621543975f62..d5b63c0e28f2fb 100644 --- a/src/tagged_pointer.zig +++ b/src/tagged_pointer.zig @@ -54,8 +54,8 @@ pub const TaggedPointer = packed struct { pub fn TaggedPointerUnion(comptime Types: anytype) type { const TagType: type = tag_break: { if (std.meta.trait.isIndexable(@TypeOf(Types))) { - var enumFields: [Types.len]std.builtin.TypeInfo.EnumField = undefined; - var decls = [_]std.builtin.TypeInfo.Declaration{}; + var enumFields: [Types.len]std.builtin.Type.EnumField = undefined; + var decls = [_]std.builtin.Type.Declaration{}; inline for (Types) |field, i| { enumFields[i] = .{ @@ -74,9 +74,9 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { }, }); } else { - const Fields: []const std.builtin.TypeInfo.StructField = std.meta.fields(@TypeOf(Types)); - var enumFields: [Fields.len]std.builtin.TypeInfo.EnumField = undefined; - var decls = [_]std.builtin.TypeInfo.Declaration{}; + const Fields: []const std.builtin.Type.StructField = std.meta.fields(@TypeOf(Types)); + var enumFields: [Fields.len]std.builtin.Type.EnumField = undefined; + var decls = [_]std.builtin.Type.Declaration{}; inline for (Fields) |field, i| { enumFields[i] = .{ From eec7402250c0ee5b52ea5b4451d2e6682dce1ff6 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 03:55:53 +0100 Subject: [PATCH 36/51] Fix: Makefile flags --- Makefile | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index de67d4b74f60c9..994123c356594f 100644 --- a/Makefile +++ b/Makefile @@ -64,6 +64,7 @@ WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO NPM_CLIENT ?= $(shell which bun || which npm) ZIG ?= $(shell which zig || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable") +ZIG_FLAGS ?= "-fstage1" # We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore # If we don't do this, strange memory allocation failures occur. 
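# Note: ZIG_FLAGS (default "-fstage1") presumably exists because Zig 0.10 made the
# self-hosted compiler the default; -fstage1 keeps builds on the legacy stage1 frontend
# until the code is ported to stage2. Recipes that drive the Zig build splice it in,
# as the `headers` target further below does:
#
#   $(ZIG) build headers-obj $(ZIG_FLAGS)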
@@ -377,7 +378,7 @@ endif SHARED_LIB_EXTENSION = .so JSC_BINDINGS = $(BINDINGS_OBJ) $(JSC_FILES) -JSC_BINDINGS_DEBUG = $(DEBUG_BINDINGS_OBJ) $(JSC_FILES_DEBUG) +JSC_BINDINGS_DEBUG = $(DEBUG_BINDINGS_OBJ) $(JSC_FILES_DEBUG) RELEASE_FLAGS= DEBUG_FLAGS= @@ -405,7 +406,7 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \ -lusockets \ $(BUN_DEPS_OUT_DIR)/libuwsockets.o -ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO) +ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO) STATIC_MUSL_FLAG ?= @@ -426,9 +427,9 @@ PLATFORM_LINKER_FLAGS = $(BUN_CFLAGS) \ -flto \ -Wl,--allow-multiple-definition \ -rdynamic - - + + endif @@ -870,7 +871,7 @@ CLANG_FORMAT := $(shell command -v clang-format 2> /dev/null) headers: rm -f /tmp/build-jsc-headers src/bun.js/bindings/headers.zig touch src/bun.js/bindings/headers.zig - $(ZIG) build headers-obj + $(ZIG) build headers-obj $(ZIG_FLAGS) $(CXX) $(PLATFORM_LINKER_FLAGS) $(JSC_FILES_DEBUG) ${ICU_FLAGS} $(DEBUG_IO_FILES) $(BUN_LLD_FLAGS_WITHOUT_JSC) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers -lc; /tmp/build-jsc-headers $(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig @@ -1681,7 +1682,7 @@ webcrypto: sizegen: mkdir -p $(BUN_TMP_DIR) - $(CXX) src/bun.js/headergen/sizegen.cpp -Wl,-dead_strip -Wl,-dead_strip_dylibs -fuse-ld=lld -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1 + $(CXX) src/bun.js/headergen/sizegen.cpp -Wl,-dead_strip -Wl,-dead_strip_dylibs -fuse-ld=lld -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1 $(BUN_TMP_DIR)/sizegen > src/bun.js/bindings/sizes.zig From 878102cc99d6b6640693d9ee2a4fe4974d6c922c Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 03:56:44 +0100 Subject: [PATCH 37/51] Fix: optional string fmt --- src/bun.js/webcore/streams.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 395ded5cf3ac20..05ad02525168f7 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -2357,7 +2357,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { // In this case, it's always an error pub fn end(this: *@This(), err: ?Syscall.Error) JSC.Node.Maybe(void) { - log("end({s})", .{err}); + log("end({?s})", .{err}); if (this.requested_end) { return .{ .result = {} }; From 580107d0011999e48db0bbeda0f8550fa9d0743f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 04:23:56 +0100 Subject: [PATCH 38/51] Fix: fmt {s} -> {any} --- build.zig | 16 +-- misctools/fetch.zig | 4 +- misctools/http_bench.zig | 16 +-- misctools/readlink-getfd.zig | 4 +- misctools/readlink-realpath.zig | 2 +- misctools/tgz.zig | 2 +- src/__global.zig | 4 +- src/baby_list.zig | 2 +- src/bench/string-handling.zig | 12 +-- src/bun.js/api/bun.zig | 36 +++---- src/bun.js/api/ffi.zig | 36 +++---- src/bun.js/api/html_rewriter.zig | 4 +- src/bun.js/api/server.zig | 22 ++-- src/bun.js/api/transpiler.zig | 14 +-- src/bun.js/base.zig | 30 +++--- src/bun.js/event_loop.zig | 2 +- src/bun.js/javascript.zig | 54 +++++----- src/bun.js/node/node_fs_binding.zig | 6 +- src/bun.js/node/node_fs_constant.zig | 2 +- src/bun.js/node/types.zig | 6 +- src/bun.js/test/jest.zig | 2 +- src/bun.js/uuid.zig | 2 +- src/bun.js/webcore.zig | 4 +- src/bun.js/webcore/streams.zig | 2 +- src/bun_js.zig | 2 +- src/bundler.zig | 14 +-- src/bunfig.zig | 4 +- src/cache.zig | 10 +- src/cli.zig | 16 +-- src/cli/bun_command.zig | 6 +- src/cli/create_command.zig | 106 +++++++++---------- src/cli/init_command.zig | 12 +-- 
src/cli/install_completions_command.zig | 14 +-- src/cli/package_manager_command.zig | 20 ++-- src/cli/run_command.zig | 42 ++++---- src/cli/test_command.zig | 8 +- src/cli/upgrade_command.zig | 62 +++++------ src/comptime_string_map.zig | 4 +- src/css_scanner.zig | 2 +- src/defines.zig | 4 +- src/deps/boringssl.translated.zig | 2 +- src/deps/picohttp.zig | 22 ++-- src/deps/zig-clap/clap.zig | 28 ++--- src/deps/zig-datetime/src/datetime.zig | 8 +- src/env_loader.zig | 10 +- src/fs.zig | 14 +-- src/http.zig | 130 ++++++++++++------------ src/http/websocket_http_client.zig | 6 +- src/http_client_async.zig | 16 +-- src/io/io_darwin.zig | 2 +- src/js_ast.zig | 28 ++--- src/js_lexer.zig | 22 ++-- src/js_parser.zig | 116 ++++++++++----------- src/js_printer.zig | 22 ++-- src/jsc.zig | 2 +- src/json_parser.zig | 8 +- src/linear_fifo.zig | 2 +- src/linker.zig | 30 +++--- src/logger.zig | 10 +- src/meta.zig | 2 +- src/napi/napi.zig | 4 +- src/network_thread.zig | 2 +- src/node_module_bundle.zig | 10 +- src/open.zig | 12 +-- src/options.zig | 26 ++--- src/pool.zig | 4 +- src/renamer.zig | 2 +- src/report.zig | 36 +++---- src/router.zig | 18 ++-- src/runtime.zig | 2 +- src/string_immutable.zig | 2 +- src/test/tester.zig | 14 +-- src/toml/toml_lexer.zig | 8 +- src/url.zig | 6 +- src/watcher.zig | 6 +- 75 files changed, 621 insertions(+), 621 deletions(-) diff --git a/build.zig b/build.zig index b20e51a8f2f82e..37d73f3ef39755 100644 --- a/build.zig +++ b/build.zig @@ -187,7 +187,7 @@ const BunBuildOptions = struct { var output_dir: []const u8 = ""; fn panicIfNotFound(comptime filepath: []const u8) []const u8 { var file = std.fs.cwd().openFile(filepath, .{ .mode = .read_only }) catch |err| { - std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) }); + std.debug.panic("error: {any} opening {any}. 
Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) }); }; file.close(); @@ -274,7 +274,7 @@ pub fn build(b: *std.build.Builder) !void { if (std.os.getenv("OUTPUT_DIR")) |output_dir_| { output_dir = output_dir_; } else { - const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet }); + const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{any}{any}", .{ bin_label, triplet }); output_dir = b.pathFromRoot(output_dir_base); } @@ -304,9 +304,9 @@ pub fn build(b: *std.build.Builder) !void { else .{ .major = 0, .minor = 0, .patch = 0 }; // exe.want_lto = true; - defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step); + defer b.default_step.dependOn(&b.addLog("Output: {any}/{any}\n", .{ output_dir, bun_executable_name }).step); defer b.default_step.dependOn(&b.addLog( - "Build {s} v{} - v{}\n", + "Build {any} v{} - v{}\n", .{ triplet, min_version, @@ -377,7 +377,7 @@ pub fn build(b: *std.build.Builder) !void { { obj_step.dependOn(&b.addLog( - "Build {s} v{} - v{} ({s})\n", + "Build {any} v{} - v{} ({any})\n", .{ triplet, min_version, @@ -415,7 +415,7 @@ pub fn build(b: *std.build.Builder) !void { obj.link_function_sections = true; } - var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name }); + var log_step = b.addLog("Destination: {any}/{any}\n", .{ output_dir, bun_executable_name }); log_step.step.dependOn(&obj.step); } @@ -515,7 +515,7 @@ pub fn build(b: *std.build.Builder) !void { try linkObjectFiles(b, headers_obj, target); { - var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"}); + var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{any} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"}); var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "–––---\n\n" ++ "\x1b[0m", .{}); headers_step.dependOn(&before.step); headers_step.dependOn(&headers_obj.step); @@ -539,7 +539,7 @@ pub fn build(b: *std.build.Builder) !void { try test_.packages.appendSlice(children); } - var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name}); + var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{any} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name}); var after = b.addLog("\x1b[" ++ color_map.get("d").? 
++ "–––---\n\n" ++ "\x1b[0m", .{}); headers_step.dependOn(&before.step); headers_step.dependOn(&test_.step); diff --git a/misctools/fetch.zig b/misctools/fetch.zig index 0b990572f4900e..2828f0a9957eda 100644 --- a/misctools/fetch.zig +++ b/misctools/fetch.zig @@ -114,12 +114,12 @@ pub const Arguments = struct { var absolute_path_ = file_path_buf[0..absolute_path_len :0]; var body_file = std.fs.openFileAbsoluteZ(absolute_path_, .{ .mode = .read_only }) catch |err| { - Output.printErrorln("{s} opening file {s}", .{ @errorName(err), absolute_path }); + Output.printErrorln("{any} opening file {any}", .{ @errorName(err), absolute_path }); Global.exit(1); }; var file_contents = body_file.readToEndAlloc(allocator, try body_file.getEndPos()) catch |err| { - Output.printErrorln("{s} reading file {s}", .{ @errorName(err), absolute_path }); + Output.printErrorln("{any} reading file {any}", .{ @errorName(err), absolute_path }); Global.exit(1); }; body_string = file_contents; diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index acf3aac317e908..5064a7da795c1e 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -120,12 +120,12 @@ pub const Arguments = struct { var absolute_path_ = file_path_buf[0..absolute_path_len :0]; var body_file = std.fs.openFileAbsoluteZ(absolute_path_, .{ .mode = .read_only }) catch |err| { - Output.printErrorln("{s} opening file {s}", .{ @errorName(err), absolute_path }); + Output.printErrorln("{any} opening file {any}", .{ @errorName(err), absolute_path }); Global.exit(1); }; var file_contents = body_file.readToEndAlloc(allocator, try body_file.getEndPos()) catch |err| { - Output.printErrorln("{s} reading file {s}", .{ @errorName(err), absolute_path }); + Output.printErrorln("{any} reading file {any}", .{ @errorName(err), absolute_path }); Global.exit(1); }; body_string = file_contents; @@ -166,11 +166,11 @@ pub const Arguments = struct { .concurrency = std.fmt.parseInt(u16, args.option("--max-concurrency") orelse "32", 10) catch 32, .turbo = args.flag("--turbo"), .timeout = std.fmt.parseInt(usize, args.option("--timeout") orelse "0", 10) catch |err| { - Output.prettyErrorln("{s} parsing timeout", .{@errorName(err)}); + Output.prettyErrorln("{any} parsing timeout", .{@errorName(err)}); Global.exit(1); }, .count = std.fmt.parseInt(usize, args.option("--count") orelse "10", 10) catch |err| { - Output.prettyErrorln("{s} parsing count", .{@errorName(err)}); + Output.prettyErrorln("{any} parsing count", .{@errorName(err)}); Global.exit(1); }, }; @@ -265,7 +265,7 @@ pub fn main() anyerror!void { } if (http.gzip_elapsed > 0) { - Output.prettyError(" {s} - {s} ({d} bytes, ", .{ + Output.prettyError(" {any} - {any} ({d} bytes, ", .{ @tagName(http.client.method), http.client.url.href, http.response_buffer.list.items.len, @@ -273,7 +273,7 @@ pub fn main() anyerror!void { Output.printElapsed(@floatCast(f64, @intToFloat(f128, http.gzip_elapsed) / std.time.ns_per_ms)); Output.prettyError(" gzip)\n", .{}); } else { - Output.prettyError(" {s} - {s} ({d} bytes)\n", .{ + Output.prettyError(" {any} - {any} ({d} bytes)\n", .{ @tagName(http.client.method), http.client.url.href, http.response_buffer.list.items.len, @@ -281,10 +281,10 @@ pub fn main() anyerror!void { } } else if (http.err) |err| { fail_count += 1; - Output.printError(" err: {s}\n", .{@errorName(err)}); + Output.printError(" err: {any}\n", .{@errorName(err)}); } else { fail_count += 1; - Output.prettyError(" Uh-oh: {s}\n", .{@tagName(http.state.loadUnchecked())}); + Output.prettyError(" Uh-oh: 
{any}\n", .{@tagName(http.state.loadUnchecked())}); } Output.flush(); diff --git a/misctools/readlink-getfd.zig b/misctools/readlink-getfd.zig index 4ec18e224b8d5f..a9f50eff9cdbe7 100644 --- a/misctools/readlink-getfd.zig +++ b/misctools/readlink-getfd.zig @@ -49,7 +49,7 @@ pub fn main() anyerror!void { var file = std.os.openZ(joined_z, O_PATH | std.os.O.CLOEXEC, 0) catch |err| { switch (err) { error.NotDir, error.FileNotFound => { - Output.prettyError("404 Not Found: \"{s}\"", .{joined_z}); + Output.prettyError("404 Not Found: \"{any}\"", .{joined_z}); Global.exit(1); }, else => { @@ -62,5 +62,5 @@ pub fn main() anyerror!void { file.close(); } - Output.print("{s}", .{path}); + Output.print("{any}", .{path}); } diff --git a/misctools/readlink-realpath.zig b/misctools/readlink-realpath.zig index f683f986c9db7a..6e28592e817915 100644 --- a/misctools/readlink-realpath.zig +++ b/misctools/readlink-realpath.zig @@ -35,5 +35,5 @@ pub fn main() anyerror!void { path = try std.os.realpathZ(to_resolve, &out_buffer); } - Output.print("{s}", .{path}); + Output.print("{any}", .{path}); } diff --git a/misctools/tgz.zig b/misctools/tgz.zig index 5ec619dec278c1..69e3c76aead837 100644 --- a/misctools/tgz.zig +++ b/misctools/tgz.zig @@ -48,7 +48,7 @@ pub fn main() anyerror!void { }; const tarball_path = path_handler.joinAbsStringBuf(try std.process.getCwdAlloc(std.heap.c_allocator), &tarball_path_buf, &parts, .auto); - Output.prettyErrorln("Tarball Path: {s}", .{tarball_path}); + Output.prettyErrorln("Tarball Path: {any}", .{tarball_path}); var folder = basename; // var dir = try std.fs.cwd().makeOpenPath(folder, .{ .iterate = true }); diff --git a/src/__global.zig b/src/__global.zig index 70d9e8b6860cd3..79f80458f69946 100644 --- a/src/__global.zig +++ b/src/__global.zig @@ -16,9 +16,9 @@ else pub const package_json_version_with_sha = if (Environment.git_sha.len == 0) package_json_version else if (Environment.isDebug) - std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }) + std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({any})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }) else - std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }); + std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({any})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }); pub const os_name = if (Environment.isWindows) "win32" diff --git a/src/baby_list.zig b/src/baby_list.zig index 0c30e7f258b909..44d404b27bb2f8 100644 --- a/src/baby_list.zig +++ b/src/baby_list.zig @@ -166,7 +166,7 @@ const bun = @import("./global.zig"); // pub fn writeLatin1(this: *Delayer, list_: BabyList(u8), str: []const u8, allocator: std.mem.Allocator) !BabyList(u8) { // var list = list_; -// log("writeLatin1({any}, {s})", .{ .delayer = this, .str = str }); +// log("writeLatin1({any}, {any})", .{ .delayer = this, .str = str }); // { // switch (this.last_encoding) { diff --git a/src/bench/string-handling.zig b/src/bench/string-handling.zig index b3dda0ca3074ae..0f3b82f91dd095 100644 --- a/src/bench/string-handling.zig +++ b/src/bench/string-handling.zig @@ -21,9 +21,9 @@ pub fn main() anyerror!void { } if (index == std.math.maxInt(usize)) { - std.debug.print("manual [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("manual [{d} byte file] {any} NOT found in {}\n", .{ contents.len, find, 
std.fmt.fmtDuration(timer.read()) }); } else { - std.debug.print("manual [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("manual [{d} byte file] {any} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); } } @@ -38,9 +38,9 @@ pub fn main() anyerror!void { } if (index == std.math.maxInt(usize)) { - std.debug.print("memcpy [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("memcpy [{d} byte file] {any} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); } else { - std.debug.print("memcpy [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("memcpy [{d} byte file] {any} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); } } @@ -56,9 +56,9 @@ pub fn main() anyerror!void { } if (index == std.math.maxInt(usize)) { - std.debug.print("ArrayList [{d} byte file] {s} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("ArrayList [{d} byte file] {any} NOT found in {}\n", .{ contents.len, find, std.fmt.fmtDuration(timer.read()) }); } else { - std.debug.print("ArrayList [{d} byte file] {s} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); + std.debug.print("ArrayList [{d} byte file] {any} found at {d} in {}\n", .{ contents.len, find, index, std.fmt.fmtDuration(timer.read()) }); } } } diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index f8f7b55a886598..e37ed08621a4b4 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -307,7 +307,7 @@ pub fn registerMacro( } if (!arguments[1].?.value().isCell() or !arguments[1].?.value().isCallable(ctx.vm())) { - JSError(getAllocator(ctx), "Macro must be a function. Received: {s}", .{@tagName(js.JSValueGetType(ctx, arguments[1]))}, ctx, exception); + JSError(getAllocator(ctx), "Macro must be a function. 
Received: {any}", .{@tagName(js.JSValueGetType(ctx, arguments[1]))}, ctx, exception); return js.JSValueMakeUndefined(ctx); } @@ -591,26 +591,26 @@ pub fn readFileAsStringCallback( ) js.JSValueRef { const path = buf_z.ptr[0..buf_z.len]; var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| { - JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "Opening file {any} for path: \"{any}\"", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; defer file.close(); const stat = file.stat() catch |err| { - JSError(getAllocator(ctx), "Getting file size {s} for \"{s}\"", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "Getting file size {any} for \"{any}\"", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; if (stat.kind != .File) { - JSError(getAllocator(ctx), "Can't read a {s} as a string (\"{s}\")", .{ @tagName(stat.kind), path }, ctx, exception); + JSError(getAllocator(ctx), "Can't read a {any} as a string (\"{any}\")", .{ @tagName(stat.kind), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); } var contents_buf = VirtualMachine.vm.allocator.alloc(u8, stat.size + 2) catch unreachable; // OOM defer VirtualMachine.vm.allocator.free(contents_buf); const contents_len = file.readAll(contents_buf) catch |err| { - JSError(getAllocator(ctx), "{s} reading file (\"{s}\")", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "{any} reading file (\"{any}\")", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; @@ -631,26 +631,26 @@ pub fn readFileAsBytesCallback( const path = buf_z.ptr[0..buf_z.len]; var file = std.fs.cwd().openFileZ(buf_z, .{ .mode = .read_only }) catch |err| { - JSError(getAllocator(ctx), "Opening file {s} for path: \"{s}\"", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "Opening file {any} for path: \"{any}\"", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; defer file.close(); const stat = file.stat() catch |err| { - JSError(getAllocator(ctx), "Getting file size {s} for \"{s}\"", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "Getting file size {any} for \"{any}\"", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; if (stat.kind != .File) { - JSError(getAllocator(ctx), "Can't read a {s} as a string (\"{s}\")", .{ @tagName(stat.kind), path }, ctx, exception); + JSError(getAllocator(ctx), "Can't read a {any} as a string (\"{any}\")", .{ @tagName(stat.kind), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); } var contents_buf = VirtualMachine.vm.allocator.alloc(u8, stat.size + 2) catch unreachable; // OOM errdefer VirtualMachine.vm.allocator.free(contents_buf); const contents_len = file.readAll(contents_buf) catch |err| { - JSError(getAllocator(ctx), "{s} reading file (\"{s}\")", .{ @errorName(err), path }, ctx, exception); + JSError(getAllocator(ctx), "{any} reading file (\"{any}\")", .{ @errorName(err), path }, ctx, exception); return js.JSValueMakeUndefined(ctx); }; @@ -743,7 +743,7 @@ pub fn openInEditor( editor_choice = edit.editor; if (editor_choice == null) { edit.* = prev; - JSError(getAllocator(ctx), "Could not find editor \"{s}\"", .{sliced.slice()}, ctx, exception); + JSError(getAllocator(ctx), "Could not find editor \"{any}\"", .{sliced.slice()}, ctx, exception); return 
js.JSValueMakeUndefined(ctx); } else if (edit.name.ptr == edit.path.ptr) { edit.name = bun.default_allocator.dupe(u8, edit.path) catch unreachable; @@ -778,7 +778,7 @@ pub fn openInEditor( } editor.open(edit.path, path, line, column, bun.default_allocator) catch |err| { - JSC.JSError(bun.default_allocator, "Opening editor failed {s}", .{@errorName(err)}, ctx, exception); + JSC.JSError(bun.default_allocator, "Opening editor failed {any}", .{@errorName(err)}, ctx, exception); return null; }; @@ -1049,7 +1049,7 @@ pub fn readAllStdinSync( var stdin = std.io.getStdIn(); var result = stdin.readToEndAlloc(allocator, std.math.maxInt(u32)) catch |err| { - JSError(undefined, "{s} reading stdin", .{@errorName(err)}, ctx, exception); + JSError(undefined, "{any} reading stdin", .{@errorName(err)}, ctx, exception); return null; }; var out = ZigString.init(result); @@ -1375,7 +1375,7 @@ pub const Crypto = struct { switch (string_or_buffer) { .string => |str| { const encoding = JSC.Node.Encoding.from(str) orelse { - globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str}); + globalThis.throwInvalidArguments("Unknown encoding: {any}", .{str}); return JSC.JSValue.zero; }; @@ -1428,7 +1428,7 @@ pub const Crypto = struct { switch (string_or_buffer) { .string => |str| { const encoding = JSC.Node.Encoding.from(str) orelse { - globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str}); + globalThis.throwInvalidArguments("Unknown encoding: {any}", .{str}); return JSC.JSValue.zero; }; @@ -2124,19 +2124,19 @@ pub const Unsafe = struct { // defer getAllocator(ctx).destroy(lockfile); // switch (cause.step) { // .open_file => { -// JSError(undefined, "error opening lockfile: {s}", .{ +// JSError(undefined, "error opening lockfile: {any}", .{ // @errorName(cause.value), // }, ctx, exception); // return null; // }, // .parse_file => { -// JSError(undefined, "error parsing lockfile: {s}", .{ +// JSError(undefined, "error parsing lockfile: {any}", .{ // @errorName(cause.value), // }, ctx, exception); // return null; // }, // .read_file => { -// JSError(undefined, "error reading lockfile: {s}", .{ +// JSError(undefined, "error reading lockfile: {any}", .{ // @errorName(cause.value), // }, ctx, exception); // return null; @@ -2934,7 +2934,7 @@ pub const FFI = struct { } const array_buffer = value.asArrayBuffer(globalThis) orelse { - return JSC.toInvalidArguments("Expected ArrayBufferView but received {s}", .{@tagName(value.jsType())}, globalThis); + return JSC.toInvalidArguments("Expected ArrayBufferView but received {any}", .{@tagName(value.jsType())}, globalThis); }; if (array_buffer.len == 0) { diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index 269557bcc7454b..7d97d50dc13a14 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -331,7 +331,7 @@ pub const FFI = struct { // optional if the user passed "ptr" if (function.symbol_from_dynamic_library == null) { var resolved_symbol = dylib.lookup(*anyopaque, function_name) orelse { - const ret = JSC.toInvalidArguments("Symbol \"{s}\" not found in \"{s}\"", .{ std.mem.span(function_name), name_slice.slice() }, global); + const ret = JSC.toInvalidArguments("Symbol \"{any}\" not found in \"{any}\"", .{ std.mem.span(function_name), name_slice.slice() }, global); for (symbols.values()) |*value| { allocator.free(bun.constStrToU8(std.mem.span(value.base_name.?))); value.arg_types.clearAndFree(allocator); @@ -345,7 +345,7 @@ pub const FFI = struct { } function.compile(allocator) catch |err| { - const ret = JSC.toInvalidArguments("{s} when 
compiling symbol \"{s}\" in \"{s}\"", .{ + const ret = JSC.toInvalidArguments("{any} when compiling symbol \"{any}\" in \"{any}\"", .{ std.mem.span(@errorName(err)), std.mem.span(function_name), name_slice.slice(), @@ -434,7 +434,7 @@ pub const FFI = struct { const function_name = function.base_name.?; if (function.symbol_from_dynamic_library == null) { - const ret = JSC.toInvalidArguments("Symbol for \"{s}\" not found", .{std.mem.span(function_name)}, global); + const ret = JSC.toInvalidArguments("Symbol for \"{any}\" not found", .{std.mem.span(function_name)}, global); for (symbols.values()) |*value| { allocator.free(bun.constStrToU8(std.mem.span(value.base_name.?))); value.arg_types.clearAndFree(allocator); @@ -444,7 +444,7 @@ pub const FFI = struct { } function.compile(allocator) catch |err| { - const ret = JSC.toInvalidArguments("{s} when compiling symbol \"{s}\"", .{ + const ret = JSC.toInvalidArguments("{any} when compiling symbol \"{any}\"", .{ std.mem.span(@errorName(err)), std.mem.span(function_name), }, global); @@ -544,7 +544,7 @@ pub const FFI = struct { defer type_name.deinit(); abi_types.appendAssumeCapacity(ABIType.label.get(type_name.slice()) orelse { abi_types.clearAndFree(allocator); - return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown type {s}", .{type_name.slice()}, global); + return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown type {any}", .{type_name.slice()}, global); }); } } @@ -576,7 +576,7 @@ pub const FFI = struct { defer ret_slice.deinit(); return_type = ABIType.label.get(ret_slice.slice()) orelse { abi_types.clearAndFree(allocator); - return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown return type {s}", .{ret_slice.slice()}, global); + return JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_ARG_VALUE, "Unknown return type {any}", .{ret_slice.slice()}, global); }; } @@ -1143,7 +1143,7 @@ pub const FFI = struct { try this.return_type.typename(writer); try writer.writeAll(" return_value = "); } - try writer.print("{s}(", .{std.mem.span(this.base_name.?)}); + try writer.print("{any}(", .{std.mem.span(this.base_name.?)}); first = true; arg_buf[0..3].* = "arg".*; for (this.arg_types.items) |arg, i| { @@ -1296,7 +1296,7 @@ pub const FFI = struct { const len = inner_buf.len + 1; inner_buf = inner_buf_[0..len]; inner_buf[0] = '_'; - try writer.print("return {s}", .{this.return_type.toCExact(inner_buf)}); + try writer.print("return {any}", .{this.return_type.toCExact(inner_buf)}); } try writer.writeAll(";\n}\n\n"); @@ -1449,7 +1449,7 @@ pub const FFI = struct { }, .char, .int8_t, .uint8_t, .int16_t, .uint16_t, .int32_t, .uint32_t => { if (self.exact) - try writer.print("({s})", .{std.mem.span(@tagName(self.tag))}); + try writer.print("({any})", .{std.mem.span(@tagName(self.tag))}); try writer.writeAll("JSVALUE_TO_INT32("); }, @@ -1498,31 +1498,31 @@ pub const FFI = struct { switch (self.tag) { .void => {}, .bool => { - try writer.print("BOOLEAN_TO_JSVALUE({s})", .{self.symbol}); + try writer.print("BOOLEAN_TO_JSVALUE({any})", .{self.symbol}); }, .char, .int8_t, .uint8_t, .int16_t, .uint16_t, .int32_t => { - try writer.print("INT32_TO_JSVALUE((int32_t){s})", .{self.symbol}); + try writer.print("INT32_TO_JSVALUE((int32_t){any})", .{self.symbol}); }, .uint32_t, .i64_fast => { - try writer.print("INT64_TO_JSVALUE(JS_GLOBAL_OBJECT, (int64_t){s})", .{self.symbol}); + try writer.print("INT64_TO_JSVALUE(JS_GLOBAL_OBJECT, (int64_t){any})", .{self.symbol}); }, .int64_t => { - try 
writer.print("INT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {s})", .{self.symbol}); + try writer.print("INT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {any})", .{self.symbol}); }, .u64_fast => { - try writer.print("UINT64_TO_JSVALUE(JS_GLOBAL_OBJECT, {s})", .{self.symbol}); + try writer.print("UINT64_TO_JSVALUE(JS_GLOBAL_OBJECT, {any})", .{self.symbol}); }, .uint64_t => { - try writer.print("UINT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {s})", .{self.symbol}); + try writer.print("UINT64_TO_JSVALUE_SLOW(JS_GLOBAL_OBJECT, {any})", .{self.symbol}); }, .function, .cstring, .ptr => { - try writer.print("PTR_TO_JSVALUE({s})", .{self.symbol}); + try writer.print("PTR_TO_JSVALUE({any})", .{self.symbol}); }, .double => { - try writer.print("DOUBLE_TO_JSVALUE({s})", .{self.symbol}); + try writer.print("DOUBLE_TO_JSVALUE({any})", .{self.symbol}); }, .float => { - try writer.print("FLOAT_TO_JSVALUE({s})", .{self.symbol}); + try writer.print("FLOAT_TO_JSVALUE({any})", .{self.symbol}); }, } } diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index a3632bf329e3cd..252915f5660d45 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -735,7 +735,7 @@ const DocumentHandler = struct { .Object, .ProxyObject, .Cell, .FinalObject => {}, else => |kind| { JSC.throwInvalidArguments( - "Expected object but received {s}", + "Expected object but received {any}", .{@as(string, @tagName(kind))}, global, exception, @@ -886,7 +886,7 @@ const ElementHandler = struct { .Object, .ProxyObject, .Cell, .FinalObject => {}, else => |kind| { JSC.throwInvalidArguments( - "Expected object but received {s}", + "Expected object but received {any}", .{@as(string, @tagName(kind))}, global, exception, diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 9f3333a27ac235..5ec0eb878769f2 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -413,13 +413,13 @@ pub const ServerConfig = struct { const protocol: string = if (args.ssl_config != null) "https" else "http"; args.base_uri = (if ((args.port == 80 and args.ssl_config == null) or (args.port == 443 and args.ssl_config != null)) - std.fmt.allocPrint(bun.default_allocator, "{s}://{s}/{s}", .{ + std.fmt.allocPrint(bun.default_allocator, "{any}://{any}/{any}", .{ protocol, args.base_url.hostname, strings.trimLeadingChar(args.base_url.pathname, '/'), }) else - std.fmt.allocPrint(bun.default_allocator, "{s}://{s}:{d}/{s}", .{ + std.fmt.allocPrint(bun.default_allocator, "{any}://{any}:{d}/{any}", .{ protocol, args.base_url.hostname, args.port, @@ -434,12 +434,12 @@ pub const ServerConfig = struct { const protocol: string = if (args.ssl_config != null) "https" else "http"; args.base_uri = (if ((args.port == 80 and args.ssl_config == null) or (args.port == 443 and args.ssl_config != null)) - std.fmt.allocPrint(bun.default_allocator, "{s}://{s}/", .{ + std.fmt.allocPrint(bun.default_allocator, "{any}://{any}/", .{ protocol, hostname, }) else - std.fmt.allocPrint(bun.default_allocator, "{s}://{s}:{d}/", .{ protocol, hostname, args.port })) catch unreachable; + std.fmt.allocPrint(bun.default_allocator, "{any}://{any}:{d}/", .{ protocol, hostname, args.port })) catch unreachable; if (!strings.isAllASCII(hostname)) { JSC.throwInvalidArguments("Unicode hostnames must already be encoded for now.\nnew URL(input).hostname should do the trick.", .{}, global, exception); @@ -1051,7 +1051,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp if (errcode != .SUCCESS or this.aborted or 
this.sendfile.remain == 0 or val == 0) { if (errcode != .AGAIN and errcode != .SUCCESS and errcode != .PIPE) { - Output.prettyErrorln("Error: {s}", .{@tagName(errcode)}); + Output.prettyErrorln("Error: {any}", .{@tagName(errcode)}); Output.flush(); } this.cleanupAndFinalizeAfterSendfile(); @@ -1075,7 +1075,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.sendfile.remain -= wrote; if (errcode != .AGAIN or this.aborted or this.sendfile.remain == 0 or sbytes == 0) { if (errcode != .AGAIN and errcode != .SUCCESS and errcode != .PIPE) { - Output.prettyErrorln("Error: {s}", .{@tagName(errcode)}); + Output.prettyErrorln("Error: {any}", .{@tagName(errcode)}); Output.flush(); } this.cleanupAndFinalizeAfterSendfile(); @@ -1691,7 +1691,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } } - streamLog("onReject({s})", .{wrote_anything}); + streamLog("onReject({any})", .{wrote_anything}); if (req.aborted) { req.finalizeForAbort(); @@ -1948,7 +1948,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp vm.log, error.ExceptionOcurred, exception_list.toOwnedSlice(), - "{s} - {s} failed", + "{any} - {any} failed", .{ @as(string, @tagName(this.method)), this.ensurePathname() }, ); } else { @@ -2055,7 +2055,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.resp.writeHeader( "content-disposition", - std.fmt.bufPrint(&filename_buf, "filename=\"{s}\"", .{basename[0..@min(basename.len, 1024 - 32)]}) catch "", + std.fmt.bufPrint(&filename_buf, "filename=\"{any}\"", .{basename[0..@min(basename.len, 1024 - 32)]}) catch "", ); } } @@ -2667,7 +2667,7 @@ pub const ServerWebSocket = struct { message: []const u8, opcode: uws.Opcode, ) void { - log("onMessage({d}): {s}", .{ + log("onMessage({d}): {any}", .{ @enumToInt(opcode), message, }); @@ -4219,7 +4219,7 @@ pub fn NewServer(comptime ssl_enabled_: bool, comptime debug_mode_: bool) type { if (written > 0) { var message = output_buf[0..written]; - zig_str = ZigString.init(std.fmt.allocPrint(bun.default_allocator, "OpenSSL {s}", .{message}) catch unreachable); + zig_str = ZigString.init(std.fmt.allocPrint(bun.default_allocator, "OpenSSL {any}", .{message}) catch unreachable); zig_str.withEncoding().mark(); } } diff --git a/src/bun.js/api/transpiler.zig b/src/bun.js/api/transpiler.zig index c48f1f0931665a..75b5866bf111ce 100644 --- a/src/bun.js/api/transpiler.zig +++ b/src/bun.js/api/transpiler.zig @@ -381,7 +381,7 @@ fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allo const value_type = property_value.jsType(); if (!value_type.isStringLike()) { - JSC.throwInvalidArguments("define \"{s}\" must be a JSON string", .{prop}, ctx, exception); + JSC.throwInvalidArguments("define \"{any}\" must be a JSON string", .{prop}, ctx, exception); return transpiler; } @@ -667,7 +667,7 @@ fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allo var key = try key_.toOwnedSlice(bun.default_allocator); if (!JSLexer.isIdentifier(key)) { - JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{key}, ctx, exception); + JSC.throwInvalidArguments("\"{any}\" is not a valid ECMAScript identifier", .{key}, ctx, exception); bun.default_allocator.free(key); return transpiler; } @@ -687,7 +687,7 @@ fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allo var replacement_name = slice.slice(); if (!JSLexer.isIdentifier(replacement_name)) { - 
JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{replacement_name}, ctx, exception); + JSC.throwInvalidArguments("\"{any}\" is not a valid ECMAScript identifier", .{replacement_name}, ctx, exception); slice.deinit(); return transpiler; } @@ -760,7 +760,7 @@ pub fn constructor( return null; } - JSC.throwInvalidArguments("Error creating transpiler: {s}", .{@errorName(err)}, ctx, exception); + JSC.throwInvalidArguments("Error creating transpiler: {any}", .{@errorName(err)}, ctx, exception); return null; }; @@ -773,7 +773,7 @@ pub fn constructor( return null; } - JSC.throwInvalidArguments("Failed to load define: {s}", .{@errorName(err)}, ctx, exception); + JSC.throwInvalidArguments("Failed to load define: {any}", .{@errorName(err)}, ctx, exception); return null; }; @@ -1110,7 +1110,7 @@ pub fn transformSync( buffer_writer.reset(); var printer = JSPrinter.BufferPrinter.init(buffer_writer); _ = this.bundler.print(parse_result, @TypeOf(&printer), &printer, .esm_ascii) catch |err| { - JSC.JSError(bun.default_allocator, "Failed to print code: {s}", .{@errorName(err)}, ctx, exception); + JSC.JSError(bun.default_allocator, "Failed to print code: {any}", .{@errorName(err)}, ctx, exception); return null; }; @@ -1260,7 +1260,7 @@ pub fn scanImports( return null; } - JSC.throwInvalidArguments("Failed to scan imports: {s}", .{@errorName(err)}, ctx, exception); + JSC.throwInvalidArguments("Failed to scan imports: {any}", .{@errorName(err)}, ctx, exception); return null; }; diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index f8769dff97ad46..259a04934a3d46 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -273,7 +273,7 @@ pub const To = struct { if (comptime Info == .Struct) { if (comptime @hasDecl(Type, "Class") and @hasDecl(Type.Class, "isJavaScriptCoreClass")) { if (comptime !@hasDecl(Type, "finalize")) { - @compileError(comptime std.fmt.comptimePrint("JSC class {s} must implement finalize to prevent memory leaks", .{Type.Class.name})); + @compileError(comptime std.fmt.comptimePrint("JSC class {any} must implement finalize to prevent memory leaks", .{Type.Class.name})); } if (comptime !@hasDecl(Type, "toJS")) { @@ -583,12 +583,12 @@ pub const d = struct { } if (no_type) { - buf = buf ++ printIndented("{s}({s});\n", .{ + buf = buf ++ printIndented("{any}({any});\n", .{ func.name, args, }, indent); } else { - buf = buf ++ printIndented("{s}({s}): {s};\n", .{ + buf = buf ++ printIndented("{any}({any}): {any};\n", .{ func.name, args, func.@"return", @@ -618,12 +618,12 @@ pub const d = struct { } if (no_type) { - buf = buf ++ printIndented("function {s}({s});\n", .{ + buf = buf ++ printIndented("function {any}({any});\n", .{ func.name, args, }, indent); } else { - buf = buf ++ printIndented("function {s}({s}): {s};\n", .{ + buf = buf ++ printIndented("function {any}({any}): {any};\n", .{ func.name, args, func.@"return", @@ -676,7 +676,7 @@ pub const d = struct { if (klass.global) { buf = buf ++ printIndented("declare global {{\n", .{}, indent); } else { - buf = buf ++ printIndented("declare module \"{s}\" {{\n", .{klass.path}, indent); + buf = buf ++ printIndented("declare module \"{any}\" {{\n", .{klass.path}, indent); } indent += indent_level; @@ -734,9 +734,9 @@ pub const d = struct { const qualifier = if (!klass.default_export) "export " else ""; if (klass.interface) { - buf = buf ++ printIndented("export interface {s} {{\n", .{klass.name}, indent); + buf = buf ++ printIndented("export interface {any} {{\n", .{klass.name}, indent); } else { - buf = buf ++ 
printIndented("{s}class {s} {{\n", .{ qualifier, klass.name }, indent); + buf = buf ++ printIndented("{any}class {any} {{\n", .{ qualifier, klass.name }, indent); } indent += indent_level; @@ -781,7 +781,7 @@ pub const d = struct { buf = buf ++ printIndented("}}\n", .{}, indent); if (klass.default_export) { - buf = buf ++ printIndented("export = {s};\n", .{klass.name}, indent); + buf = buf ++ printIndented("export = {any};\n", .{klass.name}, indent); } break :brk; @@ -797,14 +797,14 @@ pub const d = struct { const first = splitter.next() orelse break :brk; const second = splitter.next() orelse { - buf = buf ++ printIndented("/** {s} */\n", .{std.mem.trim(u8, first, " ")}, indent); + buf = buf ++ printIndented("/** {any} */\n", .{std.mem.trim(u8, first, " ")}, indent); break :brk; }; buf = buf ++ printIndented("/**\n", .{}, indent); - buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, first, " ")}, indent); - buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, second, " ")}, indent); + buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, first, " ")}, indent); + buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, second, " ")}, indent); while (splitter.next()) |line| { - buf = buf ++ printIndented(" * {s}\n", .{std.mem.trim(u8, line, " ")}, indent); + buf = buf ++ printIndented(" * {any}\n", .{std.mem.trim(u8, line, " ")}, indent); } buf = buf ++ printIndented("*/\n", .{}, indent); } @@ -3278,7 +3278,7 @@ pub fn DOMCall( try writer.writeAll("JSC::DOMJIT::Effect::forPure(),\n "); } else if (effect.writes[0] == DOMEffect.pure.writes[0]) { try writer.print( - "JSC::DOMJIT::Effect::forReadKinds(JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}),\n ", + "JSC::DOMJIT::Effect::forReadKinds(JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}),\n ", .{ @tagName(effect.reads[0]), @tagName(effect.reads[1]), @@ -3288,7 +3288,7 @@ pub fn DOMCall( ); } else if (effect.reads[0] == DOMEffect.pure.reads[0]) { try writer.print( - "JSC::DOMJIT::Effect::forWriteKinds(JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}, JSC::DFG::AbstractHeapKind::{s}),\n ", + "JSC::DOMJIT::Effect::forWriteKinds(JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}, JSC::DFG::AbstractHeapKind::{any}),\n ", .{ @tagName(effect.writes[0]), @tagName(effect.writes[1]), diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index ea760234a41fbf..f0bda2ab32b9cf 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -295,7 +295,7 @@ pub const EventLoop = struct { this.virtual_machine.modules.onPoll(); }, else => if (Environment.allow_assert) { - bun.Output.prettyln("\nUnexpected tag: {s}\n", .{@tagName(task.tag())}); + bun.Output.prettyln("\nUnexpected tag: {any}\n", .{@tagName(task.tag())}); } else unreachable, } diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 2478159fd3eacd..7169657eae5267 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -1147,13 +1147,13 @@ pub const VirtualMachine = struct { .data = logger.rangeData( null, logger.Range.None, - std.fmt.allocPrint(vm.allocator, "Unexpected pending import in \"{s}\". 
To automatically install npm packages with Bun, please use an import statement instead of require() or dynamic import().\nThis error can also happen if dependencies import packages which are not referenced anywhere. Worst case, run `bun install` and opt-out of the node_modules folder until we come up with a better way to handle this error.", .{specifier.slice()}) catch unreachable, + std.fmt.allocPrint(vm.allocator, "Unexpected pending import in \"{any}\". To automatically install npm packages with Bun, please use an import statement instead of require() or dynamic import().\nThis error can also happen if dependencies import packages which are not referenced anywhere. Worst case, run `bun install` and opt-out of the node_modules folder until we come up with a better way to handle this error.", .{specifier.slice()}) catch unreachable, ), }; } break :brk logger.Msg{ - .data = logger.rangeData(null, logger.Range.None, std.fmt.allocPrint(vm.allocator, "{s} while building {s}", .{ @errorName(err), specifier.slice() }) catch unreachable), + .data = logger.rangeData(null, logger.Range.None, std.fmt.allocPrint(vm.allocator, "{any} while building {any}", .{ @errorName(err), specifier.slice() }) catch unreachable), }; }; { @@ -1198,7 +1198,7 @@ pub const VirtualMachine = struct { errors.ptr, @intCast(u16, errors.len), &ZigString.init( - std.fmt.allocPrint(vm.bundler.allocator, "{d} errors building \"{s}\"", .{ + std.fmt.allocPrint(vm.bundler.allocator, "{d} errors building \"{any}\"", .{ errors.len, specifier.slice(), }) catch unreachable, @@ -1510,7 +1510,7 @@ pub const VirtualMachine = struct { ) catch |err| { if (comptime Environment.isDebug) { // yo dawg - Output.printErrorln("Error while printing Error-like object: {s}", .{@errorName(err)}); + Output.printErrorln("Error while printing Error-like object: {any}", .{@errorName(err)}); Output.flush(); } }; @@ -1724,7 +1724,7 @@ pub const VirtualMachine = struct { last_pad = pad; try writer.writeByteNTimes(' ', pad); try writer.print( - comptime Output.prettyFmt("{d} | {s}\n", allow_ansi_color), + comptime Output.prettyFmt("{d} | {any}\n", allow_ansi_color), .{ source.line, std.mem.trim(u8, source.text, "\n"), @@ -1743,7 +1743,7 @@ pub const VirtualMachine = struct { try writer.print( comptime Output.prettyFmt( - "- | {s}\n", + "- | {any}\n", allow_ansi_color, ), .{ @@ -1762,7 +1762,7 @@ pub const VirtualMachine = struct { try writer.print( comptime Output.prettyFmt( - "{d} | {s}\n", + "{d} | {any}\n", allow_ansi_color, ), .{ source.line, remainder }, @@ -1823,13 +1823,13 @@ pub const VirtualMachine = struct { if (value.toStringOrNull(this.global)) |str| { var zig_str = str.toSlice(this.global, bun.default_allocator); defer zig_str.deinit(); - try writer.print(comptime Output.prettyFmt(" {s}: \"{s}\"\n", allow_ansi_color), .{ field, zig_str.slice() }); + try writer.print(comptime Output.prettyFmt(" {any}: \"{any}\"\n", allow_ansi_color), .{ field, zig_str.slice() }); add_extra_line = true; } } else if (kind.isObject() or kind.isArray()) { var zig_str = ZigString.init(""); value.jsonStringify(this.global, 2, &zig_str); - try writer.print(comptime Output.prettyFmt(" {s}: {s}\n", allow_ansi_color), .{ field, zig_str }); + try writer.print(comptime Output.prettyFmt(" {any}: {any}\n", allow_ansi_color), .{ field, zig_str }); add_extra_line = true; } } @@ -1843,7 +1843,7 @@ pub const VirtualMachine = struct { } else if (show.errno) { try writer.writeAll(" "); } - try writer.print(comptime Output.prettyFmt(" path: \"{s}\"\n", allow_ansi_color), 
.{exception.path}); + try writer.print(comptime Output.prettyFmt(" path: \"{any}\"\n", allow_ansi_color), .{exception.path}); } if (show.fd) { @@ -1862,12 +1862,12 @@ pub const VirtualMachine = struct { } else if (show.errno) { try writer.writeAll(" "); } - try writer.print(comptime Output.prettyFmt(" code: \"{s}\"\n", allow_ansi_color), .{exception.system_code}); + try writer.print(comptime Output.prettyFmt(" code: \"{any}\"\n", allow_ansi_color), .{exception.system_code}); add_extra_line = true; } if (show.syscall) { - try writer.print(comptime Output.prettyFmt("syscall: \"{s}\"\n", allow_ansi_color), .{exception.syscall}); + try writer.print(comptime Output.prettyFmt("syscall: \"{any}\"\n", allow_ansi_color), .{exception.syscall}); add_extra_line = true; } @@ -1888,18 +1888,18 @@ pub const VirtualMachine = struct { if (name.len > 0 and message.len > 0) { const display_name: ZigString = if (!name.is16Bit() and strings.eqlComptime(name.slice(), "Error")) ZigString.init("error") else name; - try writer.print(comptime Output.prettyFmt("{any}: {s}\n", allow_ansi_color), .{ + try writer.print(comptime Output.prettyFmt("{any}: {any}\n", allow_ansi_color), .{ display_name, message, }); } else if (name.len > 0) { if (name.is16Bit() or !strings.hasPrefixComptime(name.slice(), "error")) { - try writer.print(comptime Output.prettyFmt("error: {s}\n", allow_ansi_color), .{name}); + try writer.print(comptime Output.prettyFmt("error: {any}\n", allow_ansi_color), .{name}); } else { - try writer.print(comptime Output.prettyFmt("{s}\n", allow_ansi_color), .{name}); + try writer.print(comptime Output.prettyFmt("{any}\n", allow_ansi_color), .{name}); } } else if (message.len > 0) { - try writer.print(comptime Output.prettyFmt("error: {s}\n", allow_ansi_color), .{message}); + try writer.print(comptime Output.prettyFmt("error: {any}\n", allow_ansi_color), .{message}); } else { try writer.print(comptime Output.prettyFmt("error\n", allow_ansi_color), .{}); } @@ -2085,23 +2085,23 @@ pub const ResolveError = struct { switch (err) { error.ModuleNotFound => { if (Resolver.isPackagePath(specifier) and !strings.containsChar(specifier, '/')) { - return try std.fmt.allocPrint(allocator, "Cannot find package \"{s}\" from \"{s}\"", .{ specifier, referrer }); + return try std.fmt.allocPrint(allocator, "Cannot find package \"{any}\" from \"{any}\"", .{ specifier, referrer }); } else { - return try std.fmt.allocPrint(allocator, "Cannot find module \"{s}\" from \"{s}\"", .{ specifier, referrer }); + return try std.fmt.allocPrint(allocator, "Cannot find module \"{any}\" from \"{any}\"", .{ specifier, referrer }); } }, else => { if (Resolver.isPackagePath(specifier)) { - return try std.fmt.allocPrint(allocator, "{s} while resolving package \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer }); + return try std.fmt.allocPrint(allocator, "{any} while resolving package \"{any}\" from \"{any}\"", .{ @errorName(err), specifier, referrer }); } else { - return try std.fmt.allocPrint(allocator, "{s} while resolving \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer }); + return try std.fmt.allocPrint(allocator, "{any} while resolving \"{any}\" from \"{any}\"", .{ @errorName(err), specifier, referrer }); } }, } } pub fn toStringFn(this: *ResolveError, ctx: js.JSContextRef) js.JSValueRef { - var text = std.fmt.allocPrint(default_allocator, "ResolveError: {s}", .{this.msg.data.text}) catch return null; + var text = std.fmt.allocPrint(default_allocator, "ResolveError: {any}", .{this.msg.data.text}) catch return 
null; var str = ZigString.init(text); str.setOutputEncoding(); if (str.isUTF8()) { @@ -2316,7 +2316,7 @@ pub const BuildError = struct { ); pub fn toStringFn(this: *BuildError, ctx: js.JSContextRef) js.JSValueRef { - var text = std.fmt.allocPrint(default_allocator, "BuildError: {s}", .{this.msg.data.text}) catch return null; + var text = std.fmt.allocPrint(default_allocator, "BuildError: {any}", .{this.msg.data.text}) catch return null; var str = ZigString.init(text); str.setOutputEncoding(); if (str.isUTF8()) { @@ -2585,7 +2585,7 @@ pub const HotReloader = struct { const id = hashes[event.index]; if (comptime Environment.isDebug) { - Output.prettyErrorln("[watcher] {s}: -- {}", .{ @tagName(kind), event.op }); + Output.prettyErrorln("[watcher] {any}: -- {}", .{ @tagName(kind), event.op }); } switch (kind) { @@ -2600,7 +2600,7 @@ pub const HotReloader = struct { } if (comptime bun.FeatureFlags.verbose_watcher) { - Output.prettyErrorln("File changed: {s}", .{fs.relativeTo(file_path)}); + Output.prettyErrorln("File changed: {any}", .{fs.relativeTo(file_path)}); } if (event.op.write) { @@ -2658,7 +2658,7 @@ pub const HotReloader = struct { if (last_file_hash == file_hash) continue; last_file_hash = file_hash; - Output.prettyErrorln(" File change: {s}", .{fs.relativeTo(abs_path)}); + Output.prettyErrorln(" File change: {any}", .{fs.relativeTo(abs_path)}); } } } @@ -2666,9 +2666,9 @@ pub const HotReloader = struct { // if (event.op.delete or event.op.rename) // ctx.watcher.removeAtIndex(event.index, hashes[event.index], parent_hashes, .directory); if (comptime false) { - Output.prettyErrorln("📁 Dir change: {s}", .{fs.relativeTo(file_path)}); + Output.prettyErrorln("📁 Dir change: {any}", .{fs.relativeTo(file_path)}); } else { - Output.prettyErrorln(" Dir change: {s}", .{fs.relativeTo(file_path)}); + Output.prettyErrorln(" Dir change: {any}", .{fs.relativeTo(file_path)}); } }, } diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index 110b3fc5a66b2f..261163d39da5f6 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -77,10 +77,10 @@ fn call(comptime Function: NodeFSFunctionEnum) NodeFSFunction { // const function: std.builtin.Type.Fn = comptime @typeInfo(FunctionType).Fn; // comptime if (function.args.len != 3) @compileError("Expected 3 arguments"); - // const Arguments = comptime function.args[2].arg_type orelse @compileError(std.fmt.comptimePrint("Function {s} expected to have an arg type at [2]", .{@typeName(FunctionType)})); + // const Arguments = comptime function.args[2].arg_type orelse @compileError(std.fmt.comptimePrint("Function {any} expected to have an arg type at [2]", .{@typeName(FunctionType)})); // const Result = comptime function.return_type.?; - // comptime if (Arguments != void and !fromJSTrait(Arguments)) @compileError(std.fmt.comptimePrint("{s} is missing fromJS()", .{@typeName(Arguments)})); - // comptime if (Result != void and !toJSTrait(Result)) @compileError(std.fmt.comptimePrint("{s} is missing toJS()", .{@typeName(Result)})); + // comptime if (Arguments != void and !fromJSTrait(Arguments)) @compileError(std.fmt.comptimePrint("{any} is missing fromJS()", .{@typeName(Arguments)})); + // comptime if (Result != void and !toJSTrait(Result)) @compileError(std.fmt.comptimePrint("{any} is missing toJS()", .{@typeName(Result)})); const NodeBindingClosure = struct { pub fn bind( this: *JSC.Node.NodeFS, diff --git a/src/bun.js/node/node_fs_constant.zig b/src/bun.js/node/node_fs_constant.zig index 
72e752184cc42e..724fe49ed228ab 100644 --- a/src/bun.js/node/node_fs_constant.zig +++ b/src/bun.js/node/node_fs_constant.zig @@ -169,7 +169,7 @@ const constants_string_format1 = \\ O_DSYNC: {d}, ; const constants_string_format2 = - \\ O_SYMLINK: {s}, + \\ O_SYMLINK: {any}, \\ O_DIRECT: {d}, \\ O_NONBLOCK: {d}, \\ S_IFMT: {d}, diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 89d466c4e0aac4..bb7ef85ca9da41 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -1564,7 +1564,7 @@ pub const Path = struct { if (name_.isEmpty()) { return JSC.ZigString.Empty.toValue(globalThis); } - const out = std.fmt.allocPrint(allocator, "{s}{s}", .{ name_, ext }) catch unreachable; + const out = std.fmt.allocPrint(allocator, "{any}{any}", .{ name_, ext }) catch unreachable; defer allocator.free(out); return JSC.ZigString.init(out).withEncoding().toValueGC(globalThis); @@ -1573,13 +1573,13 @@ pub const Path = struct { if (insert_separator) { const separator = if (!isWindows) "/" else "\\"; if (name_with_ext.isEmpty()) { - const out = std.fmt.allocPrint(allocator, "{}{s}{}{}", .{ dir, separator, name_, ext }) catch unreachable; + const out = std.fmt.allocPrint(allocator, "{}{any}{}{}", .{ dir, separator, name_, ext }) catch unreachable; defer allocator.free(out); return JSC.ZigString.init(out).withEncoding().toValueGC(globalThis); } { - const out = std.fmt.allocPrint(allocator, "{}{s}{}", .{ + const out = std.fmt.allocPrint(allocator, "{}{any}{}", .{ dir, separator, name_with_ext, diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 5dce0eac3cb818..87c5a5a4bf8e71 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -325,7 +325,7 @@ pub const Expect = struct { var rhs_formatter: JSC.ZigConsoleClient.Formatter = JSC.ZigConsoleClient.Formatter{ .globalThis = globalObject }; if (comptime Environment.allow_assert) { - Output.prettyErrorln("\nJSType: {s}\nJSType: {s}\n\n", .{ @tagName(left.jsType()), @tagName(right.jsType()) }); + Output.prettyErrorln("\nJSType: {any}\nJSType: {any}\n\n", .{ @tagName(left.jsType()), @tagName(right.jsType()) }); } globalObject.throw( diff --git a/src/bun.js/uuid.zig b/src/bun.js/uuid.zig index fa59520c2dc353..f2f93c96ffb808 100644 --- a/src/bun.js/uuid.zig +++ b/src/bun.js/uuid.zig @@ -73,7 +73,7 @@ pub fn format( var buf: [36]u8 = undefined; self.print(&buf); - try fmt.format(writer, "{s}", .{buf}); + try fmt.format(writer, "{any}", .{buf}); } pub fn print( diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index c8bf7545da65a9..6c923421fffd79 100644 --- a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -275,7 +275,7 @@ pub const Prompt = struct { const default_string = arguments[1].?.value().toSlice(ctx.ptr(), allocator); defer default_string.deinit(); - output.print("[{s}] ", .{default_string.slice()}) catch { + output.print("[{any}] ", .{default_string.slice()}) catch { // 1. If we cannot show simple dialogs for this, then return false. 
return JSC.JSValue.jsBoolean(false).asObjectRef(); }; @@ -390,7 +390,7 @@ pub const Crypto = struct { } var array_buffer = arguments[0].asArrayBuffer(globalThis) orelse { - globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[0].jsType())}); + globalThis.throwInvalidArguments("Expected typed array but got {any}", .{@tagName(arguments[0].jsType())}); return JSC.JSValue.jsUndefined(); }; var slice = array_buffer.byteSlice(); diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 05ad02525168f7..6d18c71d5ae6bb 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -2165,7 +2165,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { } pub fn flushFromJS(this: *@This(), globalThis: *JSGlobalObject, wait: bool) JSC.Node.Maybe(JSValue) { - log("flushFromJS({s})", .{wait}); + log("flushFromJS({any})", .{wait}); if (!wait) { return this.flushFromJSNoWait(); } diff --git a/src/bun_js.zig b/src/bun_js.zig index d406033a1ccf64..8f05eb56007c97 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -101,7 +101,7 @@ pub const Run = struct { null, logger.Loc.Empty, run.vm.allocator, - "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{s}\" is not a valid integer between 1 and 65535", + "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{any}\" is not a valid integer between 1 and 65535", .{max_http_requests}, ) catch unreachable; break :load; diff --git a/src/bundler.zig b/src/bundler.zig index 8c9cdb7699ea11..6fa9b06695f1ac 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -409,13 +409,13 @@ pub const Bundler = struct { }; if (has_dot_slash_form) { - bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving \"{s}\". Did you mean: \"./{s}\"", .{ + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} resolving \"{any}\". 
Did you mean: \"./{any}\"", .{ @errorName(err), entry_point, entry_point, }) catch unreachable; } else { - bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} resolving \"{s}\" (entry point)", .{ @errorName(err), entry_point }) catch unreachable; + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} resolving \"{any}\" (entry point)", .{ @errorName(err), entry_point }) catch unreachable; } return err; @@ -1372,7 +1372,7 @@ pub const Bundler = struct { true, file_descriptor, ) catch |err| { - bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} reading \"{s}\"", .{ @errorName(err), path.text }) catch {}; + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{any} reading \"{any}\"", .{ @errorName(err), path.text }) catch {}; return null; }; input_fd = entry.fd; @@ -1511,7 +1511,7 @@ pub const Bundler = struct { null, logger.Loc.Empty, bundler.allocator, - "Invalid wasm file \"{s}\" (missing magic header)", + "Invalid wasm file \"{any}\" (missing magic header)", .{path.text}, ) catch {}; return null; @@ -1526,7 +1526,7 @@ pub const Bundler = struct { } }, .css => {}, - else => Global.panic("Unsupported loader {s} for path: {s}", .{ loader, source.path.text }), + else => Global.panic("Unsupported loader {any} for path: {any}", .{ loader, source.path.text }), } return null; @@ -1680,12 +1680,12 @@ pub const Bundler = struct { } const result = bundler.resolver.resolve(bundler.fs.top_level_dir, entry, .entry_point) catch |err| { - Output.prettyError("Error resolving \"{s}\": {s}\n", .{ entry, @errorName(err) }); + Output.prettyError("Error resolving \"{any}\": {any}\n", .{ entry, @errorName(err) }); continue; }; if (result.pathConst() == null) { - Output.prettyError("\"{s}\" is disabled due to \"browser\" field in package.json.\n", .{ + Output.prettyError("\"{any}\" is disabled due to \"browser\" field in package.json.\n", .{ entry, }); continue; diff --git a/src/bunfig.zig b/src/bunfig.zig index d2ca863806158d..0224f04eb46e45 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -64,11 +64,11 @@ pub const Bunfig = struct { // Token if (url.username.len == 0 and url.password.len > 0) { registry.token = url.password; - registry.url = try std.fmt.allocPrint(this.allocator, "{s}://{s}/{s}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") }); + registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{any}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") }); } else if (url.username.len > 0 and url.password.len > 0) { registry.username = url.username; registry.password = url.password; - registry.url = try std.fmt.allocPrint(this.allocator, "{s}://{s}/{s}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") }); + registry.url = try std.fmt.allocPrint(this.allocator, "{any}://{any}/{any}", .{ url.displayProtocol(), url.displayHostname(), std.mem.trimLeft(u8, url.pathname, "/") }); } else { registry.url = url.href; } diff --git a/src/cache.zig b/src/cache.zig index 2ff1ce896e1a5d..a89a597f4530a8 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -117,14 +117,14 @@ pub const Fs = struct { const file = if (this.stream) rfs.readFileWithHandle(path, null, file_handle, true, shared, true) catch |err| { if (comptime Environment.isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) }); } return 
err; } else rfs.readFileWithHandle(path, null, file_handle, true, shared, false) catch |err| { if (comptime Environment.isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) }); } return err; }; @@ -154,7 +154,7 @@ pub const Fs = struct { error.FileNotFound => { const handle = try std.fs.openFileAbsolute(path, .{ .mode = .read_only }); Output.prettyErrorln( - "Internal error: directory mismatch for directory \"{s}\", fd {d}. You don't need to do anything, but this indicates a bug.", + "Internal error: directory mismatch for directory \"{any}\", fd {d}. You don't need to do anything, but this indicates a bug.", .{ path, dirname_fd }, ); break :brk handle; @@ -176,14 +176,14 @@ pub const Fs = struct { const file = if (c.stream) rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, c.sharedBuffer(), true) catch |err| { if (Environment.isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) }); } return err; } else rfs.readFileWithHandle(path, null, file_handle, use_shared_buffer, c.sharedBuffer(), false) catch |err| { if (Environment.isDebug) { - Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) }); + Output.printError("{any}: readFile error -- {any}", .{ path, @errorName(err) }); } return err; }; diff --git a/src/cli.zig b/src/cli.zig index f0e7526b52ce29..b8b31add4c52d7 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -129,7 +129,7 @@ pub const Arguments = struct { } pub fn fileReadError(err: anyerror, stderr: anytype, filename: string, kind: string) noreturn { - stderr.writer().print("Error reading file \"{s}\" for {s}: {s}", .{ filename, kind, @errorName(err) }) catch {}; + stderr.writer().print("Error reading file \"{any}\" for {any}: {any}", .{ filename, kind, @errorName(err) }) catch {}; std.process.exit(1); } @@ -223,7 +223,7 @@ pub const Arguments = struct { fn loadConfigPath(allocator: std.mem.Allocator, auto_loaded: bool, config_path: [:0]const u8, ctx: *Command.Context, comptime cmd: Command.Tag) !void { var config_file = std.fs.openFileAbsoluteZ(config_path, .{ .mode = .read_only }) catch |err| { if (auto_loaded) return; - Output.prettyErrorln("error: {s} opening config \"{s}\"", .{ + Output.prettyErrorln("error: {any} opening config \"{any}\"", .{ @errorName(err), std.mem.span(config_path), }); @@ -232,7 +232,7 @@ pub const Arguments = struct { defer config_file.close(); var contents = config_file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| { if (auto_loaded) return; - Output.prettyErrorln("error: {s} reading config \"{s}\"", .{ + Output.prettyErrorln("error: {any} reading config \"{any}\"", .{ @errorName(err), std.mem.span(config_path), }); @@ -453,7 +453,7 @@ pub const Arguments = struct { } else if (enum_value.len == 0) { ctx.debug.global_cache = options.GlobalCache.force; } else { - Output.prettyErrorln("Invalid value for --install: \"{s}\". Must be either \"auto\", \"fallback\", \"force\", or \"disable\"\n", .{enum_value}); + Output.prettyErrorln("Invalid value for --install: \"{any}\". 
Must be either \"auto\", \"fallback\", \"force\", or \"disable\"\n", .{enum_value}); Global.exit(1); } } @@ -477,7 +477,7 @@ pub const Arguments = struct { } else if (strings.eqlComptime(setting, "external")) { opts.source_map = Api.SourceMapMode.external; } else { - Output.prettyErrorln("error: Invalid sourcemap setting: \"{s}\"", .{setting}); + Output.prettyErrorln("error: Invalid sourcemap setting: \"{any}\"", .{setting}); Global.crash(); } } @@ -1225,12 +1225,12 @@ pub const Command = struct { } if (was_js_like) { - Output.prettyErrorln("error: Module not found \"{s}\"", .{ + Output.prettyErrorln("error: Module not found \"{any}\"", .{ ctx.positionals[0], }); Global.exit(1); } else if (ctx.positionals.len > 0) { - Output.prettyErrorln("error: File not found \"{s}\"", .{ + Output.prettyErrorln("error: File not found \"{any}\"", .{ ctx.positionals[0], }); Global.exit(1); @@ -1299,7 +1299,7 @@ pub const Command = struct { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; } - Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ + Output.prettyErrorln("error: Failed to run {any} due to error {any}", .{ std.fs.path.basename(file_path), @errorName(err), }); diff --git a/src/cli/bun_command.zig b/src/cli/bun_command.zig index 77e639f1802a92..63d1e5f9003f60 100644 --- a/src/cli/bun_command.zig +++ b/src/cli/bun_command.zig @@ -57,7 +57,7 @@ const ServerBundleGeneratorThread = struct { server_bundler.router = router; server_bundler.configureDefines() catch |err| { - Output.prettyErrorln("{s} loading --define or .env values for node_modules.server.bun\n", .{@errorName(err)}); + Output.prettyErrorln("{any} loading --define or .env values for node_modules.server.bun\n", .{@errorName(err)}); return err; }; @@ -225,9 +225,9 @@ pub const BunCommand = struct { Output.prettyln(indent ++ "{d:6}ms elapsed", .{@intCast(u32, elapsed)}); if (generated_server) { - Output.prettyln(indent ++ "Saved to ./{s}, ./{s}", .{ filepath, server_bundle_filepath }); + Output.prettyln(indent ++ "Saved to ./{any}, ./{any}", .{ filepath, server_bundle_filepath }); } else { - Output.prettyln(indent ++ "Saved to ./{s}", .{filepath}); + Output.prettyln(indent ++ "Saved to ./{any}", .{filepath}); } Output.flush(); diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 27531e4ad73cad..8d8d79a8a3214b 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -87,7 +87,7 @@ const UnsupportedPackages = struct { pub fn print(this: UnsupportedPackages) void { inline for (comptime std.meta.fieldNames(UnsupportedPackages)) |field_name| { if (@field(this, field_name)) { - Output.prettyErrorln("warn: \"{s}\" won't work in bun yet\n", .{field_name}); + Output.prettyErrorln("warn: \"{any}\" won't work in bun yet\n", .{field_name}); } } } @@ -132,9 +132,9 @@ fn execTask(allocator: std.mem.Allocator, task_: string, cwd: string, _: string, Output.pretty("\n$", .{}); for (argv) |arg, i| { if (i > argv.len - 1) { - Output.print(" {s} ", .{arg}); + Output.print(" {any} ", .{arg}); } else { - Output.print(" {s}", .{arg}); + Output.print(" {any}", .{arg}); } } Output.pretty("", .{}); @@ -285,7 +285,7 @@ pub const CreateCommand = struct { var outdir_path_ = home_dir_buf[0..outdir_path.len :0]; std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer; if (create_options.verbose) { - Output.prettyErrorln("reading from {s}", .{outdir_path}); + Output.prettyErrorln("reading from {any}", .{outdir_path}); } example_tag = Example.Tag.local_folder; break :brk 
outdir_path; @@ -299,7 +299,7 @@ pub const CreateCommand = struct { var outdir_path_ = home_dir_buf[0..outdir_path.len :0]; std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer; if (create_options.verbose) { - Output.prettyErrorln("reading from {s}", .{outdir_path}); + Output.prettyErrorln("reading from {any}", .{outdir_path}); } example_tag = Example.Tag.local_folder; break :brk outdir_path; @@ -313,7 +313,7 @@ pub const CreateCommand = struct { var outdir_path_ = home_dir_buf[0..outdir_path.len :0]; std.fs.accessAbsoluteZ(outdir_path_, .{}) catch break :outer; if (create_options.verbose) { - Output.prettyErrorln("reading from {s}", .{outdir_path}); + Output.prettyErrorln("reading from {any}", .{outdir_path}); } example_tag = Example.Tag.local_folder; break :brk outdir_path; @@ -379,7 +379,7 @@ pub const CreateCommand = struct { const destination = try filesystem.dirname_store.append([]const u8, resolve_path.joinAbs(filesystem.top_level_dir, .auto, dirname)); var progress = std.Progress{}; - var node = progress.start(try ProgressBuf.print("Loading {s}", .{template}), 0); + var node = progress.start(try ProgressBuf.print("Loading {any}", .{template}), 0); progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; // alacritty is fast @@ -399,7 +399,7 @@ pub const CreateCommand = struct { var package_json_file: ?std.fs.File = null; if (create_options.verbose) { - Output.prettyErrorln("Downloading as {s}\n", .{@tagName(example_tag)}); + Output.prettyErrorln("Downloading as {any}\n", .{@tagName(example_tag)}); } switch (example_tag) { @@ -411,7 +411,7 @@ pub const CreateCommand = struct { node.end(); progress.refresh(); - Output.prettyError("\nerror: \"{s}\" was not found. Here are templates you can use:\n\n", .{ + Output.prettyError("\nerror: \"{any}\" was not found. Here are templates you can use:\n\n", .{ template, }); Output.flush(); @@ -444,7 +444,7 @@ pub const CreateCommand = struct { node.end(); progress.refresh(); - Output.prettyError("\nerror: \"{s}\" was not found on GitHub. Here are templates you can use:\n\n", .{ + Output.prettyError("\nerror: \"{any}\" was not found on GitHub. Here are templates you can use:\n\n", .{ template, }); Output.flush(); @@ -466,7 +466,7 @@ pub const CreateCommand = struct { else => unreachable, }; - node.name = try ProgressBuf.print("Decompressing {s}", .{template}); + node.name = try ProgressBuf.print("Decompressing {any}", .{template}); node.setCompletedItems(0); node.setEstimatedTotalItems(0); @@ -479,7 +479,7 @@ pub const CreateCommand = struct { try gunzip.readAll(); gunzip.deinit(); - node.name = try ProgressBuf.print("Extracting {s}", .{template}); + node.name = try ProgressBuf.print("Extracting {any}", .{template}); node.setCompletedItems(0); node.setEstimatedTotalItems(0); @@ -516,21 +516,21 @@ pub const CreateCommand = struct { // Thank you create-react-app for this copy (and idea) Output.prettyErrorln( - "\nerror: The directory {s}/ contains files that could conflict:\n\n", + "\nerror: The directory {any}/ contains files that could conflict:\n\n", .{ std.fs.path.basename(destination), }, ); for (archive_context.overwrite_list.keys()) |path| { if (strings.endsWith(path, std.fs.path.sep_str)) { - Output.prettyError(" {s}", .{path[0 .. std.math.max(path.len, 1) - 1]}); + Output.prettyError(" {any}", .{path[0 .. 
std.math.max(path.len, 1) - 1]}); Output.prettyErrorln(std.fs.path.sep_str, .{}); } else { - Output.prettyErrorln(" {s}", .{path}); + Output.prettyErrorln(" {any}", .{path}); } } - Output.prettyErrorln("\nTo download {s} anyway, use --force", .{template}); + Output.prettyErrorln("\nTo download {any} anyway, use --force", .{template}); Global.exit(1); } } @@ -568,7 +568,7 @@ pub const CreateCommand = struct { node.end(); progress.refresh(); - Output.prettyErrorln("{s}: opening dir {s}", .{ @errorName(err), template }); + Output.prettyErrorln("{any}: opening dir {any}", .{ @errorName(err), template }); Global.exit(1); }; @@ -578,7 +578,7 @@ pub const CreateCommand = struct { progress.refresh(); - Output.prettyErrorln("{s}: creating dir {s}", .{ @errorName(err), destination }); + Output.prettyErrorln("{any}: creating dir {any}", .{ @errorName(err), destination }); Global.exit(1); }; @@ -605,7 +605,7 @@ pub const CreateCommand = struct { progress_.refresh(); - Output.prettyErrorln("{s}: copying file {s}", .{ @errorName(err), entry.path }); + Output.prettyErrorln("{any}: copying file {any}", .{ @errorName(err), entry.path }); Global.exit(1); }; }; @@ -625,7 +625,7 @@ pub const CreateCommand = struct { progress_.refresh(); - Output.prettyErrorln("{s}: copying file {s}", .{ @errorName(err), entry.path }); + Output.prettyErrorln("{any}: copying file {any}", .{ @errorName(err), entry.path }); Global.exit(1); }; }; @@ -645,7 +645,7 @@ pub const CreateCommand = struct { progress.refresh(); package_json_file = null; - Output.prettyErrorln("Error reading package.json: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error reading package.json: {any}", .{@errorName(err)}); break :read_package_json; }; @@ -666,7 +666,7 @@ pub const CreateCommand = struct { progress.refresh(); - Output.prettyErrorln("Error reading package.json: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error reading package.json: {any}", .{@errorName(err)}); break :read_package_json; }; // The printer doesn't truncate, so we must do so manually @@ -1280,7 +1280,7 @@ pub const CreateCommand = struct { create_react_app_entry_point_path = std.fmt.allocPrint( ctx.allocator, - "./{s}", + "./{any}", .{ std.mem.trimLeft( @@ -1292,7 +1292,7 @@ pub const CreateCommand = struct { ) catch break :bail; html_writer.print( - "\n{s}", + "\n{any}", .{ create_react_app_entry_point_path[2..], public_index_file_contents[body_closing_tag..], @@ -1321,7 +1321,7 @@ pub const CreateCommand = struct { std.os.ftruncate(public_index_html_file.handle, outfile.len + 1) catch break :bail; bun_bun_for_react_scripts = true; is_create_react_app = true; - Output.prettyln("[package.json] Added entry point {s} to public/index.html", .{create_react_app_entry_point_path}); + Output.prettyln("[package.json] Added entry point {any} to public/index.html", .{create_react_app_entry_point_path}); } } @@ -1352,7 +1352,7 @@ pub const CreateCommand = struct { strings.contains(script, "react-scripts eject")) { if (create_options.verbose) { - Output.prettyErrorln("[package.json] Pruned unnecessary script: {s}", .{script}); + Output.prettyErrorln("[package.json] Pruned unnecessary script: {any}", .{script}); } continue; @@ -1451,7 +1451,7 @@ pub const CreateCommand = struct { var package_json_writer = JSPrinter.NewFileWriter(package_json_file.?); const written = JSPrinter.printJSON(@TypeOf(package_json_writer), package_json_writer, package_json_expr, &source) catch |err| { - Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); + 
Output.prettyErrorln("package.json failed to write due to error {any}", .{@errorName(err)}); package_json_file = null; break :process_package_json; }; @@ -1462,7 +1462,7 @@ pub const CreateCommand = struct { if (needs.bun_bun_for_nextjs) { try postinstall_tasks.append(ctx.allocator, InjectionPrefill.bun_bun_for_nextjs_task); } else if (bun_bun_for_react_scripts) { - try postinstall_tasks.append(ctx.allocator, try std.fmt.allocPrint(ctx.allocator, "bun bun {s}", .{create_react_app_entry_point_path})); + try postinstall_tasks.append(ctx.allocator, try std.fmt.allocPrint(ctx.allocator, "bun bun {any}", .{create_react_app_entry_point_path})); } } } @@ -1505,12 +1505,12 @@ pub const CreateCommand = struct { const start_time = std.time.nanoTimestamp(); const install_args = &[_]string{ npm_client.bin, "install" }; Output.flush(); - Output.pretty("\n$ {s} install", .{@tagName(npm_client.tag)}); + Output.pretty("\n$ {any} install", .{@tagName(npm_client.tag)}); if (install_args.len > 2) { for (install_args[2..]) |arg| { Output.pretty(" ", .{}); - Output.pretty("{s}", .{arg}); + Output.pretty("{any}", .{arg}); } } @@ -1523,7 +1523,7 @@ pub const CreateCommand = struct { defer { Output.printErrorln("\n", .{}); Output.printStartEnd(start_time, std.time.nanoTimestamp()); - Output.prettyError(" {s} install\n", .{@tagName(npm_client.tag)}); + Output.prettyError(" {any} install\n", .{@tagName(npm_client.tag)}); Output.flush(); Output.print("\n", .{}); @@ -1547,7 +1547,7 @@ pub const CreateCommand = struct { Output.printError("\n", .{}); Output.printStartEnd(ctx.start_time, std.time.nanoTimestamp()); - Output.prettyErrorln(" bun create {s}", .{template}); + Output.prettyErrorln(" bun create {any}", .{template}); Output.flush(); @@ -1604,13 +1604,13 @@ pub const CreateCommand = struct { Output.pretty( \\ - \\Success! {s} loaded into {s} + \\Success! 
{any} loaded into {any} \\ , .{ display_name, std.fs.path.basename(destination) }); } else { Output.pretty( \\ - \\Created {s} project successfully + \\Created {any} project successfully \\ , .{std.fs.path.basename(template)}); } @@ -1628,7 +1628,7 @@ pub const CreateCommand = struct { \\ \\# When dependencies change, run this to update node_modules.bun: \\ - \\ bun bun {s} + \\ bun bun {any} \\ , .{create_react_app_entry_point_path}); } @@ -1637,8 +1637,8 @@ pub const CreateCommand = struct { \\ \\# To get started, run: \\ - \\ cd {s} - \\ {s} + \\ cd {any} + \\ {any} \\ \\ , .{ @@ -1698,16 +1698,16 @@ pub const Example = struct { var app_name_buf: [512]u8 = undefined; pub fn print(examples: []const Example, default_app_name: ?string) void { for (examples) |example| { - var app_name = default_app_name orelse (std.fmt.bufPrint(&app_name_buf, "./{s}-app", .{example.name[0..std.math.min(example.name.len, 492)]}) catch unreachable); + var app_name = default_app_name orelse (std.fmt.bufPrint(&app_name_buf, "./{any}-app", .{example.name[0..std.math.min(example.name.len, 492)]}) catch unreachable); if (example.description.len > 0) { - Output.pretty(" # {s}\n bun create {s} {s}\n \n\n", .{ + Output.pretty(" # {any}\n bun create {any} {any}\n \n\n", .{ example.description, example.name, app_name, }); } else { - Output.pretty(" bun create {s} {s}\n\n", .{ + Output.pretty(" bun create {any} {any}\n\n", .{ example.name, app_name, }); @@ -1802,7 +1802,7 @@ pub const Example = struct { repository = repository[0..i]; } - progress.name = try ProgressBuf.pretty("[github] GET {s}/{s}", .{ owner, repository }); + progress.name = try ProgressBuf.pretty("[github] GET {any}/{any}", .{ owner, repository }); refresher.refresh(); var github_api_domain: string = "api.github.com"; @@ -1815,7 +1815,7 @@ pub const Example = struct { var api_url = URL.parse( try std.fmt.bufPrint( &github_repository_url_buf, - "https://{s}/repos/{s}/{s}/tarball", + "https://{any}/repos/{any}/{any}/tarball", .{ github_api_domain, owner, repository }, ), ); @@ -1825,7 +1825,7 @@ pub const Example = struct { if (env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| { if (access_token.len > 0) { - headers_buf = try std.fmt.allocPrint(ctx.allocator, "Access-TokenBearer {s}", .{access_token}); + headers_buf = try std.fmt.allocPrint(ctx.allocator, "Access-TokenBearer {any}", .{access_token}); try header_entries.append( ctx.allocator, Headers.Kv{ @@ -1887,7 +1887,7 @@ pub const Example = struct { refresher.refresh(); if (content_type.len > 0) { - Output.prettyErrorln("error: Unexpected content type from GitHub: {s}", .{content_type}); + Output.prettyErrorln("error: Unexpected content type from GitHub: {any}", .{content_type}); Global.crash(); } else { Output.prettyErrorln("error: Invalid response from GitHub (missing content type)", .{}); @@ -1914,7 +1914,7 @@ pub const Example = struct { var mutable = try ctx.allocator.create(MutableString); mutable.* = try MutableString.init(ctx.allocator, 2048); - url = URL.parse(try std.fmt.bufPrint(&url_buf, "https://registry.npmjs.org/@bun-examples/{s}/latest", .{name})); + url = URL.parse(try std.fmt.bufPrint(&url_buf, "https://registry.npmjs.org/@bun-examples/{any}/latest", .{name})); // ensure very stable memory address var async_http: *HTTP.AsyncHTTP = ctx.allocator.create(HTTP.AsyncHTTP) catch unreachable; @@ -1956,7 +1956,7 @@ pub const Example = struct { } Global.exit(1); } else { - Output.prettyErrorln("Error parsing package: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error parsing 
package: {any}", .{@errorName(err)}); Global.exit(1); } }; @@ -2058,14 +2058,14 @@ pub const Example = struct { Global.exit(1); }, else => { - Output.prettyErrorln("{s} while trying to fetch examples list. Please try again", .{@errorName(err)}); + Output.prettyErrorln("{any} while trying to fetch examples list. Please try again", .{@errorName(err)}); Global.exit(1); }, } }; if (response.status_code != 200) { - Output.prettyErrorln("{d} fetching examples :( {s}", .{ response.status_code, mutable.list.items }); + Output.prettyErrorln("{d} fetching examples :( {any}", .{ response.status_code, mutable.list.items }); Global.exit(1); } @@ -2080,7 +2080,7 @@ pub const Example = struct { } Global.exit(1); } else { - Output.prettyErrorln("Error parsing examples: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error parsing examples: {any}", .{@errorName(err)}); Global.exit(1); } }; @@ -2114,7 +2114,7 @@ pub const Example = struct { } } - Output.prettyErrorln("Corrupt examples data: expected object but received {s}", .{@tagName(examples_object.data)}); + Output.prettyErrorln("Corrupt examples data: expected object but received {any}", .{@tagName(examples_object.data)}); Global.exit(1); } }; @@ -2146,7 +2146,7 @@ pub const CreateListExamplesCommand = struct { if (env_loader.map.get("HOME")) |homedir| { Output.prettyln( - "This command is completely optional. To add a new local template, create a folder in {s}/.bun-create/. To publish a new template, git clone https://github.com/oven-sh/bun, add a new folder to the \"examples\" folder, and submit a PR.", + "This command is completely optional. To add a new local template, create a folder in {any}/.bun-create/. To publish a new template, git clone https://github.com/oven-sh/bun, add a new folder to the \"examples\" folder, and submit a PR.", .{homedir}, ); } else { @@ -2171,7 +2171,7 @@ const GitHandler = struct { success = std.atomic.Atomic(u32).init(0); thread = std.Thread.spawn(.{}, spawnThread, .{ destination, PATH, verbose }) catch |err| { - Output.prettyErrorln("{s}", .{@errorName(err)}); + Output.prettyErrorln("{any}", .{@errorName(err)}); Global.exit(1); }; } @@ -2233,7 +2233,7 @@ const GitHandler = struct { }; if (comptime verbose) { - Output.prettyErrorln("git backend: {s}", .{git}); + Output.prettyErrorln("git backend: {any}", .{git}); } // same names, just comptime known values diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig index 13d15ce15b171d..47457dea17f554 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -46,7 +46,7 @@ pub const InitCommand = struct { ) ![]const u8 { Output.pretty(label, .{}); if (default.len > 0) { - Output.pretty("({s}): ", .{default}); + Output.pretty("({any}): ", .{default}); } Output.flush(); @@ -307,7 +307,7 @@ pub const InitCommand = struct { js_ast.Expr{ .data = .{ .e_object = fields.object }, .loc = logger.Loc.Empty }, &logger.Source.initEmptyFile("package.json"), ) catch |err| { - Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); + Output.prettyErrorln("package.json failed to write due to error {any}", .{@errorName(err)}); package_json_file = null; break :write_package_json; }; @@ -324,7 +324,7 @@ pub const InitCommand = struct { var entry = try std.fs.cwd().createFile(fields.entry_point, .{ .truncate = true }); entry.writeAll("console.log(\"Hello via Bun!\");") catch {}; entry.close(); - Output.prettyln(" + {s}", .{fields.entry_point}); + Output.prettyln(" + {any}", .{fields.entry_point}); Output.flush(); } @@ -349,7 +349,7 
@@ pub const InitCommand = struct { var file = std.fs.cwd().createFileZ(filename, .{ .truncate = true }) catch break :brk; defer file.close(); file.writeAll(default_tsconfig) catch break :brk; - Output.prettyln(" + {s} (for editor auto-complete)", .{filename}); + Output.prettyln(" + {any} (for editor auto-complete)", .{filename}); Output.flush(); } } @@ -364,7 +364,7 @@ pub const InitCommand = struct { .bunVersion = Global.version.fmt(""), .entryPoint = fields.entry_point, }) catch break :brk; - Output.prettyln(" + {s}", .{filename}); + Output.prettyln(" + {any}", .{filename}); Output.flush(); } } @@ -377,7 +377,7 @@ pub const InitCommand = struct { )) { Output.prettyln(" bun run {any}", .{JSPrinter.formatJSONString(fields.entry_point)}); } else { - Output.prettyln(" bun run {s}", .{fields.entry_point}); + Output.prettyln(" bun run {any}", .{fields.entry_point}); } } diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index 3cb992b2de39d2..456f16e2bbb4c2 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -93,14 +93,14 @@ pub const InstallCompletionsCommand = struct { } if (!std.fs.path.isAbsolute(completions_dir)) { - Output.prettyErrorln("error: Please pass an absolute path. {s} is invalid", .{completions_dir}); + Output.prettyErrorln("error: Please pass an absolute path. {any} is invalid", .{completions_dir}); Global.exit(fail_exit_code); } break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true, }) catch |err| { - Output.prettyErrorln("error: accessing {s} errored {s}", .{ completions_dir, @errorName(err) }); + Output.prettyErrorln("error: accessing {any} errored {any}", .{ completions_dir, @errorName(err) }); Global.exit(fail_exit_code); }; } @@ -305,7 +305,7 @@ pub const InstallCompletionsCommand = struct { var output_file = output_dir.createFileZ(filename, .{ .truncate = true, }) catch |err| { - Output.prettyErrorln("error: Could not open {s} for writing: {s}", .{ + Output.prettyErrorln("error: Could not open {any} for writing: {any}", .{ filename, @errorName(err), }); @@ -313,7 +313,7 @@ pub const InstallCompletionsCommand = struct { }; output_file.writeAll(shell.completions()) catch |err| { - Output.prettyErrorln("error: Could not write to {s}: {s}", .{ + Output.prettyErrorln("error: Could not write to {any}: {any}", .{ filename, @errorName(err), }); @@ -394,7 +394,7 @@ pub const InstallCompletionsCommand = struct { // We need to add it to the end of the file var remaining = buf[read..]; - var extra = std.fmt.bufPrint(remaining, "\n# bun completions\n[ -s \"{s}\" ] && source \"{s}\"\n", .{ + var extra = std.fmt.bufPrint(remaining, "\n# bun completions\n[ -s \"{any}\" ] && source \"{any}\"\n", .{ completions_path, completions_path, }) catch unreachable; @@ -406,14 +406,14 @@ pub const InstallCompletionsCommand = struct { }; if (needs_to_tell_them_to_add_completions_file) { - Output.prettyErrorln("To enable completions, add this to your .zshrc:\n [ -s \"{s}\" ] && source \"{s}\"", .{ + Output.prettyErrorln("To enable completions, add this to your .zshrc:\n [ -s \"{any}\" ] && source \"{any}\"", .{ completions_path, completions_path, }); } } - Output.prettyErrorln("Installed completions to {s}/{s}\n", .{ + Output.prettyErrorln("Installed completions to {any}/{any}\n", .{ completions_dir, filename, }); diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 3537c75a7060a5..d81d8cba79f697 100644 --- a/src/cli/package_manager_command.zig +++ 
b/src/cli/package_manager_command.zig @@ -27,7 +27,7 @@ pub const PackageManagerCommand = struct { if (load_lockfile == .err) { if (pm.options.log_level != .silent) - Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)}); Global.exit(1); } @@ -59,7 +59,7 @@ pub const PackageManagerCommand = struct { if (strings.eqlComptime(first, "bin")) { var output_path = Path.joinAbs(Fs.FileSystem.instance.top_level_dir, .auto, std.mem.span(pm.options.bin_path)); - Output.prettyln("{s}", .{output_path}); + Output.prettyln("{any}", .{output_path}); if (Output.stdout_descriptor_type == .terminal) { Output.prettyln("\n", .{}); } @@ -93,7 +93,7 @@ pub const PackageManagerCommand = struct { if (load_lockfile == .err) { if (pm.options.log_level != .silent) - Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)}); Global.exit(1); } @@ -114,7 +114,7 @@ pub const PackageManagerCommand = struct { if (load_lockfile == .err) { if (pm.options.log_level != .silent) - Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)}); Global.exit(1); } @@ -133,7 +133,7 @@ pub const PackageManagerCommand = struct { if (load_lockfile == .err) { if (pm.options.log_level != .silent) - Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + Output.prettyError("Error loading lockfile: {any}", .{@errorName(load_lockfile.err.value)}); Global.exit(1); } @@ -143,16 +143,16 @@ pub const PackageManagerCommand = struct { var dir: [bun.MAX_PATH_BYTES]u8 = undefined; var fd = pm.getCacheDirectory(); var outpath = std.os.getFdPath(fd.fd, &dir) catch |err| { - Output.prettyErrorln("{s} getting cache directory", .{@errorName(err)}); + Output.prettyErrorln("{any} getting cache directory", .{@errorName(err)}); Global.crash(); }; if (pm.options.positionals.len > 0 and strings.eqlComptime(pm.options.positionals[0], "rm")) { std.fs.deleteTreeAbsolute(outpath) catch |err| { - Output.prettyErrorln("{s} deleting cache directory", .{@errorName(err)}); + Output.prettyErrorln("{any} deleting cache directory", .{@errorName(err)}); Global.crash(); }; - Output.prettyln("Cache directory deleted:\n {s}", .{outpath}); + Output.prettyln("Cache directory deleted:\n {any}", .{outpath}); Global.exit(0); } Output.writer().writeAll(outpath) catch {}; @@ -161,7 +161,7 @@ pub const PackageManagerCommand = struct { Output.prettyln( \\bun pm - package manager related commands - \\ + \\ \\ bun pm bin print the path to bin folder \\ bun pm -g bin print the global path to bin folder \\ bun pm hash generate & print the hash of the current lockfile @@ -173,7 +173,7 @@ pub const PackageManagerCommand = struct { , .{}); if (first.len > 0) { - Output.prettyErrorln("\nerror: \"{s}\" unknown command\n", .{first}); + Output.prettyErrorln("\nerror: \"{any}\" unknown command\n", .{first}); Output.flush(); Global.exit(1); diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 161b1362b03e17..7685f466dd7153 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -82,7 +82,7 @@ pub const RunCommand = struct { } const BUN_BIN_NAME = if (Environment.isDebug) "bun-debug" else "bun"; - const BUN_RUN = std.fmt.comptimePrint("{s} run", 
.{BUN_BIN_NAME}); + const BUN_RUN = std.fmt.comptimePrint("{any} run", .{BUN_BIN_NAME}); // Look for invocations of any: // - yarn run @@ -244,7 +244,7 @@ pub const RunCommand = struct { var child_process = std.ChildProcess.init(&argv, allocator); if (!silent) { - Output.prettyErrorln("$ {s}", .{combined_script}); + Output.prettyErrorln("$ {any}", .{combined_script}); Output.flush(); } @@ -257,13 +257,13 @@ pub const RunCommand = struct { child_process.stdout_behavior = .Inherit; const result = child_process.spawnAndWait() catch |err| { - Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ name, @errorName(err) }); + Output.prettyErrorln("error: Failed to run script {any} due to error {any}", .{ name, @errorName(err) }); Output.flush(); return true; }; if (result.Exited > 0) { - Output.prettyErrorln("Script error \"{s}\" exited with {d} status", .{ name, result.Exited }); + Output.prettyErrorln("Script error \"{any}\" exited with {d} status", .{ name, result.Exited }); Output.flush(); Global.exit(result.Exited); @@ -304,30 +304,30 @@ pub const RunCommand = struct { const rc = bun.C.stat(std.meta.assumeSentinel(executable, 0), &stat); if (rc == 0) { if (std.os.S.ISDIR(stat.mode)) { - Output.prettyErrorln("error: Failed to run directory \"{s}\"\n", .{executable}); + Output.prettyErrorln("error: Failed to run directory \"{any}\"\n", .{executable}); Global.exit(1); } } } } - Output.prettyErrorln("error: Failed to run \"{s}\" due to error {s}", .{ std.fs.path.basename(executable), @errorName(err) }); + Output.prettyErrorln("error: Failed to run \"{any}\" due to error {any}", .{ std.fs.path.basename(executable), @errorName(err) }); Global.exit(1); }; switch (result) { .Exited => |code| { - Output.prettyErrorln("error \"{s}\" exited with {d} status", .{ std.fs.path.basename(executable), code }); + Output.prettyErrorln("error \"{any}\" exited with {d} status", .{ std.fs.path.basename(executable), code }); Global.exit(code); }, .Signal => |sig| { - Output.prettyErrorln("error \"{s}\" signaled {d}", .{ std.fs.path.basename(executable), sig }); + Output.prettyErrorln("error \"{any}\" signaled {d}", .{ std.fs.path.basename(executable), sig }); Global.exit(1); }, .Stopped => |sig| { - Output.prettyErrorln("error \"{s}\" stopped: {d}", .{ std.fs.path.basename(executable), sig }); + Output.prettyErrorln("error \"{any}\" stopped: {d}", .{ std.fs.path.basename(executable), sig }); Global.exit(1); }, .Unknown => |sig| { - Output.prettyErrorln("error \"{s}\" stopped: {d}", .{ std.fs.path.basename(executable), sig }); + Output.prettyErrorln("error \"{any}\" stopped: {d}", .{ std.fs.path.basename(executable), sig }); Global.exit(1); }, } @@ -379,7 +379,7 @@ pub const RunCommand = struct { } else { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; } - Output.prettyErrorln("Error loading directory: \"{s}\"", .{@errorName(err)}); + Output.prettyErrorln("Error loading directory: \"{any}\"", .{@errorName(err)}); Output.flush(); return err; } orelse { @@ -761,7 +761,7 @@ pub const RunCommand = struct { // "White space after #! is optional." 
var shebang_buf: [64]u8 = undefined; const shebang_size = file.pread(&shebang_buf, 0) catch |err| { - Output.prettyErrorln("error: Failed to read file {s} due to error {s}", .{ file_path, @errorName(err) }); + Output.prettyErrorln("error: Failed to read file {any} due to error {any}", .{ file_path, @errorName(err) }); Global.exit(1); }; @@ -786,7 +786,7 @@ pub const RunCommand = struct { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), false) catch {}; } - Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ + Output.prettyErrorln("error: Failed to run {any} due to error {any}", .{ std.fs.path.basename(file_path), @errorName(err), }); @@ -824,11 +824,11 @@ pub const RunCommand = struct { if (scripts.count() > 0) { did_print = true; - Output.prettyln("{s} scripts:\n", .{display_name}); + Output.prettyln("{any} scripts:\n", .{display_name}); while (iterator.next()) |entry| { Output.prettyln("\n", .{}); - Output.prettyln(" bun run {s}\n", .{entry.key_ptr.*}); - Output.prettyln(" {s}\n", .{entry.value_ptr.*}); + Output.prettyln(" bun run {any}\n", .{entry.key_ptr.*}); + Output.prettyln(" {any}\n", .{entry.value_ptr.*}); } Output.prettyln("\n{d} scripts", .{scripts.count()}); @@ -837,7 +837,7 @@ pub const RunCommand = struct { return true; } else { - Output.prettyln("{s} has no \"scripts\" in package.json.", .{display_name}); + Output.prettyln("{any} has no \"scripts\" in package.json.", .{display_name}); Output.flush(); return true; } @@ -845,7 +845,7 @@ pub const RunCommand = struct { else => { if (scripts.get(script_name_to_search)) |script_content| { // allocate enough to hold "post${scriptname}" - var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{s}", .{script_name_to_search}); + var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{any}", .{script_name_to_search}); if (scripts.get(temp_script_buffer[1..])) |prescript| { if (!try runPackageScript( @@ -918,13 +918,13 @@ pub const RunCommand = struct { // var file = std.fs.openFileAbsoluteZ(destination, .{ .mode = .read_only }) catch |err| { // if (!log_errors) return false; - // Output.prettyErrorln("error: {s} opening file: \"{s}\"", .{ err, std.mem.span(destination) }); + // Output.prettyErrorln("error: {any} opening file: \"{any}\"", .{ err, std.mem.span(destination) }); // Output.flush(); // return err; // }; // // var outbuf = std.os.getFdPath(file.handle, &path_buf2) catch |err| { // // if (!log_errors) return false; - // // Output.prettyErrorln("error: {s} resolving file: \"{s}\"", .{ err, std.mem.span(destination) }); + // // Output.prettyErrorln("error: {any} resolving file: \"{any}\"", .{ err, std.mem.span(destination) }); // // Output.flush(); // // return err; // // }; @@ -943,7 +943,7 @@ pub const RunCommand = struct { } if (comptime log_errors) { - Output.prettyError("error: Missing script \"{s}\"\n", .{script_name_to_search}); + Output.prettyError("error: Missing script \"{any}\"\n", .{script_name_to_search}); Global.exit(0); } diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index ac210dfe2b9dbf..00f2d5802d2ea3 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -109,9 +109,9 @@ pub const CommandLineReporter = struct { } if (Output.enable_ansi_colors_stderr) - writer.print(comptime Output.prettyFmt(" {s}", true), .{display_label}) catch unreachable + writer.print(comptime Output.prettyFmt(" {any}", true), .{display_label}) catch unreachable else - writer.print(comptime Output.prettyFmt(" {s}", false), .{display_label}) 
catch unreachable; + writer.print(comptime Output.prettyFmt(" {any}", false), .{display_label}) catch unreachable; writer.writeAll("\n") catch unreachable; } @@ -388,7 +388,7 @@ pub const TestCommand = struct { if (reporter.summary.expectations > 0) Output.prettyError(" {d:5>} expect() calls\n", .{reporter.summary.expectations}); Output.prettyError( - \\ Ran {d} tests across {d} files + \\ Ran {d} tests across {d} files , .{ reporter.summary.fail + reporter.summary.pass, test_files.len, @@ -466,7 +466,7 @@ pub const TestCommand = struct { var resolution = try vm.bundler.resolveEntryPoint(file_name); vm.clearEntryPoint(); - Output.prettyErrorln("\n{s}:\n", .{resolution.path_pair.primary.name.filename}); + Output.prettyErrorln("\n{any}:\n", .{resolution.path_pair.primary.name.filename}); Output.flush(); var promise = try vm.loadEntryPoint(resolution.path_pair.primary.text); diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 28d323287ee581..c719a96ded6b97 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -92,7 +92,7 @@ pub const Version = struct { const current_version: string = "bun-v" ++ Global.package_json_version; - pub export const Bun__githubURL: [*:0]const u8 = std.fmt.comptimePrint("https://github.com/oven-sh/bun/release/bun-v{s}/{s}", .{ + pub export const Bun__githubURL: [*:0]const u8 = std.fmt.comptimePrint("https://github.com/oven-sh/bun/release/bun-v{any}/{any}", .{ Global.package_json_version, zip_filename, }); @@ -133,7 +133,7 @@ pub const UpgradeCheckerThread = struct { if (!version.isCurrent()) { if (version.name()) |name| { - Output.prettyErrorln("\nbun v{s} is out. Run bun upgrade to upgrade.\n", .{name}); + Output.prettyErrorln("\nbun v{any} is out. Run bun upgrade to upgrade.\n", .{name}); Output.flush(); } } @@ -144,7 +144,7 @@ pub const UpgradeCheckerThread = struct { fn run(env_loader: *DotEnv.Loader) void { _run(env_loader) catch |err| { if (Environment.isDebug) { - std.debug.print("\n[UpgradeChecker] ERROR: {s}\n", .{@errorName(err)}); + std.debug.print("\n[UpgradeChecker] ERROR: {any}\n", .{@errorName(err)}); } }; } @@ -193,7 +193,7 @@ pub const UpgradeCommand = struct { var api_url = URL.parse( try std.fmt.bufPrint( &github_repository_url_buf, - "https://{s}/repos/Jarred-Sumner/bun-releases-for-updater/releases/latest", + "https://{any}/repos/Jarred-Sumner/bun-releases-for-updater/releases/latest", .{ github_api_domain, }, @@ -202,7 +202,7 @@ pub const UpgradeCommand = struct { if (env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| { if (access_token.len > 0) { - headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "Access-TokenBearer {s}", .{access_token}); + headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "Access-TokenBearer {any}", .{access_token}); try header_entries.append( allocator, Headers.Kv{ @@ -262,7 +262,7 @@ pub const UpgradeCommand = struct { } Global.exit(1); } else { - Output.prettyErrorln("Error parsing releases from GitHub: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error parsing releases from GitHub: {any}", .{@errorName(err)}); Global.exit(1); } } @@ -294,7 +294,7 @@ pub const UpgradeCommand = struct { refresher.refresh(); const json_type: js_ast.Expr.Tag = @as(js_ast.Expr.Tag, expr.data); - Output.prettyErrorln("JSON error - expected an object but received {s}", .{@tagName(json_type)}); + Output.prettyErrorln("JSON error - expected an object but received {any}", .{@tagName(json_type)}); Global.exit(1); } @@ -312,7 +312,7 @@ pub 
const UpgradeCommand = struct { progress.end(); refresher.refresh(); - Output.prettyErrorln("JSON Error parsing releases from GitHub: tag_name is missing?\n{s}", .{metadata_body.list.items}); + Output.prettyErrorln("JSON Error parsing releases from GitHub: tag_name is missing?\n{any}", .{metadata_body.list.items}); Global.exit(1); } @@ -327,7 +327,7 @@ pub const UpgradeCommand = struct { if (asset.asProperty("content_type")) |content_type| { const content_type_ = (content_type.expr.asString(allocator)) orelse continue; if (comptime Environment.isDebug) { - Output.prettyln("Content-type: {s}", .{content_type_}); + Output.prettyln("Content-type: {any}", .{content_type_}); Output.flush(); } @@ -338,7 +338,7 @@ pub const UpgradeCommand = struct { if (name_.expr.asString(allocator)) |name| { if (comptime Environment.isDebug) { const filename = if (!use_profile) Version.zip_filename else Version.profile_zip_filename; - Output.prettyln("Comparing {s} vs {s}", .{ name, filename }); + Output.prettyln("Comparing {any} vs {any}", .{ name, filename }); Output.flush(); } @@ -347,7 +347,7 @@ pub const UpgradeCommand = struct { version.zip_url = (asset.asProperty("browser_download_url") orelse break :get_asset).expr.asString(allocator) orelse break :get_asset; if (comptime Environment.isDebug) { - Output.prettyln("Found Zip {s}", .{version.zip_url}); + Output.prettyln("Found Zip {any}", .{version.zip_url}); Output.flush(); } @@ -366,7 +366,7 @@ pub const UpgradeCommand = struct { progress.end(); refresher.refresh(); if (version.name()) |name| { - Output.prettyErrorln("bun v{s} is out, but not for this platform ({s}) yet.", .{ + Output.prettyErrorln("bun v{any} is out, but not for this platform ({any}) yet.", .{ name, Version.triplet, }); } @@ -383,7 +383,7 @@ pub const UpgradeCommand = struct { @setCold(true); _exec(ctx) catch |err| { - Output.prettyErrorln("bun upgrade failed with error: {s}\n\nPlease upgrade manually:\n curl -fsSL https://bun.sh/install | bash\n\n", .{@errorName(err)}); + Output.prettyErrorln("bun upgrade failed with error: {any}\n\nPlease upgrade manually:\n curl -fsSL https://bun.sh/install | bash\n\n", .{@errorName(err)}); Global.exit(1); }; } @@ -425,7 +425,7 @@ pub const UpgradeCommand = struct { if (version.name() != null and version.isCurrent()) { Output.prettyErrorln( - "Congrats! You're already on the latest version of bun (which is v{s})", + "Congrats! You're already on the latest version of bun (which is v{any})", .{ version.name().?, }, @@ -441,7 +441,7 @@ pub const UpgradeCommand = struct { Global.exit(1); } - Output.prettyErrorln("bun v{s} is out! You're on {s}\n", .{ version.name().?, Global.package_json_version }); + Output.prettyErrorln("bun v{any} is out! 
You're on {any}\n", .{ version.name().?, Global.package_json_version }); Output.flush(); } else { version = Version{ @@ -481,7 +481,7 @@ pub const UpgradeCommand = struct { \\error: Canary builds are not available for this platform yet \\ \\ Release: https://github.com/oven-sh/bun/releases/tag/canary - \\ Filename: {s} + \\ Filename: {any} \\ , .{ Version.zip_filename, @@ -529,14 +529,14 @@ pub const UpgradeCommand = struct { if (use_profile) profile_exe_subpath else exe_subpath; var zip_file = save_dir.createFileZ(tmpname, .{ .truncate = true }) catch |err| { - Output.prettyErrorln("error: Failed to open temp file {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to open temp file {any}", .{@errorName(err)}); Global.exit(1); }; { _ = zip_file.writeAll(bytes) catch |err| { save_dir.deleteFileZ(tmpname) catch {}; - Output.prettyErrorln("error: Failed to write to temp file {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to write to temp file {any}", .{@errorName(err)}); Global.exit(1); }; zip_file.close(); @@ -572,7 +572,7 @@ pub const UpgradeCommand = struct { const unzip_result = unzip_process.spawnAndWait() catch |err| { save_dir.deleteFileZ(tmpname) catch {}; - Output.prettyErrorln("error: Failed to spawn unzip due to {s}.", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to spawn unzip due to {any}.", .{@errorName(err)}); Global.exit(1); }; @@ -595,7 +595,7 @@ pub const UpgradeCommand = struct { .max_output_bytes = 512, }) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error Failed to verify bun {s})", .{@errorName(err)}); + Output.prettyErrorln("error Failed to verify bun {any})", .{@errorName(err)}); Global.exit(1); }; @@ -617,7 +617,7 @@ pub const UpgradeCommand = struct { save_dir_.deleteTree(version_name) catch {}; Output.prettyErrorln( - "error: The downloaded version of bun ({s}) doesn't match the expected version ({s}). Cancelled upgrade", + "error: The downloaded version of bun ({any}) doesn't match the expected version ({any}). 
Cancelled upgrade", .{ version_string[0..@min(version_string.len, 512)], version_name, @@ -639,7 +639,7 @@ pub const UpgradeCommand = struct { var target_dirname = current_executable_buf[0..target_dir_.len :0]; var target_dir = std.fs.openDirAbsoluteZ(target_dirname, .{ .iterate = true }) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to open bun's install directory {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to open bun's install directory {any}", .{@errorName(err)}); Global.exit(1); }; @@ -648,13 +648,13 @@ pub const UpgradeCommand = struct { // Check if the versions are the same const target_stat = target_dir.statFile(target_filename) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to stat target bun {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to stat target bun {any}", .{@errorName(err)}); Global.exit(1); }; const dest_stat = save_dir.statFile(exe) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to stat source bun {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to stat source bun {any}", .{@errorName(err)}); Global.exit(1); }; @@ -663,13 +663,13 @@ pub const UpgradeCommand = struct { const target_hash = std.hash.Wyhash.hash(0, target_dir.readFile(target_filename, input_buf) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to read target bun {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to read target bun {any}", .{@errorName(err)}); Global.exit(1); }); const source_hash = std.hash.Wyhash.hash(0, save_dir.readFile(exe, input_buf) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to read source bun {s}", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to read source bun {any}", .{@errorName(err)}); Global.exit(1); }); @@ -687,7 +687,7 @@ pub const UpgradeCommand = struct { if (env_loader.map.get("BUN_DRY_RUN") == null) { C.moveFileZ(save_dir.fd, exe, target_dir.fd, target_filename) catch |err| { save_dir_.deleteTree(version_name) catch {}; - Output.prettyErrorln("error: Failed to move new version of bun due to {s}. You could try the install script instead:\n curl -fsSL https://bun.sh/install | bash", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to move new version of bun due to {any}. You could try the install script instead:\n curl -fsSL https://bun.sh/install | bash", .{@errorName(err)}); Global.exit(1); }; } @@ -724,7 +724,7 @@ pub const UpgradeCommand = struct { \\ \\Changelog: \\ - \\ https://github.com/oven-sh/bun/compare/{s}...main + \\ https://github.com/oven-sh/bun/compare/{any}...main \\ , .{Environment.git_sha}, @@ -735,7 +735,7 @@ pub const UpgradeCommand = struct { Output.prettyErrorln( \\ Upgraded. \\ - \\Welcome to bun v{s}! + \\Welcome to bun v{any}! 
\\ \\Report any bugs: \\ @@ -743,11 +743,11 @@ pub const UpgradeCommand = struct { \\ \\What's new: \\ - \\ https://github.com/oven-sh/bun/releases/tag/{s} + \\ https://github.com/oven-sh/bun/releases/tag/{any} \\ \\Changelog: \\ - \\ https://github.com/oven-sh/bun/compare/{s}...{s} + \\ https://github.com/oven-sh/bun/compare/{any}...{any} \\ , .{ version_name, version.tag, bun_v, version.tag }, diff --git a/src/comptime_string_map.zig b/src/comptime_string_map.zig index 18b06d9dea9edb..1c159d2e8cad17 100644 --- a/src/comptime_string_map.zig +++ b/src/comptime_string_map.zig @@ -390,14 +390,14 @@ const TestEnum2 = enum { pub fn compareString(input: []const u8) !void { var str = try std.heap.page_allocator.dupe(u8, input); if (TestEnum2.map.has(str) != TestEnum2.official.has(str)) { - std.debug.panic("{s} - TestEnum2.map.has(str) ({d}) != TestEnum2.official.has(str) ({d})", .{ + std.debug.panic("{any} - TestEnum2.map.has(str) ({d}) != TestEnum2.official.has(str) ({d})", .{ str, @boolToInt(TestEnum2.map.has(str)), @boolToInt(TestEnum2.official.has(str)), }); } - std.debug.print("For string: \"{s}\" (has a match? {d})\n", .{ str, @boolToInt(TestEnum2.map.has(str)) }); + std.debug.print("For string: \"{any}\" (has a match? {d})\n", .{ str, @boolToInt(TestEnum2.map.has(str)) }); var i: usize = 0; var is_eql = false; diff --git a/src/css_scanner.zig b/src/css_scanner.zig index 2a56d818cf1b61..437faebfd98a54 100644 --- a/src/css_scanner.zig +++ b/src/css_scanner.zig @@ -1020,7 +1020,7 @@ pub fn NewWriter( writer.source, chunk.range, writer.buildCtx.allocator, - "Not Found - \"{s}\"", + "Not Found - \"{any}\"", .{import.text.utf8}, import_record.ImportKind.at, err, diff --git a/src/defines.zig b/src/defines.zig index a0c885f34e94d6..ce34102043aac3 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -79,9 +79,9 @@ pub const DefineData = struct { while (splitter.next()) |part| { if (!js_lexer.isIdentifier(part)) { if (strings.eql(part, entry.key_ptr)) { - try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" must be a valid identifier", .{entry.key_ptr.*}); + try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{any}\" must be a valid identifier", .{entry.key_ptr.*}); } else { - try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.value_ptr.* }); + try log.addErrorFmt(null, logger.Loc{}, allocator, "define key \"{any}\" contains invalid identifier \"{any}\"", .{ part, entry.value_ptr.* }); } break; } diff --git a/src/deps/boringssl.translated.zig b/src/deps/boringssl.translated.zig index 267fb639c6e6f0..d0ce2236fee5d9 100644 --- a/src/deps/boringssl.translated.zig +++ b/src/deps/boringssl.translated.zig @@ -18836,7 +18836,7 @@ pub const SSL = opaque { SSL_ERROR_SSL => { if (comptime Environment.isDebug) { const errdescription = std.mem.span(SSL_error_description(SSL_ERROR_SSL).?); - Output.prettyError("SSL_ERROR: {s}", .{errdescription}); + Output.prettyError("SSL_ERROR: {any}", .{errdescription}); } return error.SSL; }, diff --git a/src/deps/picohttp.zig b/src/deps/picohttp.zig index 33f305608628cd..115cc976b20524 100644 --- a/src/deps/picohttp.zig +++ b/src/deps/picohttp.zig @@ -21,15 +21,15 @@ pub const Header = struct { pub fn format(self: Header, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void { if (Output.enable_ansi_colors) { if (self.isMultiline()) { - try fmt.format(writer, comptime Output.prettyFmt("{s}", true), .{self.value}); + try fmt.format(writer, comptime 
Output.prettyFmt("{any}", true), .{self.value}); } else { - try fmt.format(writer, comptime Output.prettyFmt("{s}: {s}", true), .{ self.name, self.value }); + try fmt.format(writer, comptime Output.prettyFmt("{any}: {any}", true), .{ self.name, self.value }); } } else { if (self.isMultiline()) { - try fmt.format(writer, comptime Output.prettyFmt("{s}", false), .{self.value}); + try fmt.format(writer, comptime Output.prettyFmt("{any}", false), .{self.value}); } else { - try fmt.format(writer, comptime Output.prettyFmt("{s}: {s}", false), .{ self.name, self.value }); + try fmt.format(writer, comptime Output.prettyFmt("{any}: {any}", false), .{ self.name, self.value }); } } } @@ -74,10 +74,10 @@ pub const Request = struct { } pub fn format(self: Request, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void { - try fmt.format(writer, "{s} {s}\n", .{ self.method, self.path }); + try fmt.format(writer, "{any} {any}\n", .{ self.method, self.path }); for (self.headers) |header| { _ = try writer.write("\t"); - try fmt.format(writer, "{s}\n", .{header}); + try fmt.format(writer, "{any}\n", .{header}); } } @@ -125,10 +125,10 @@ pub const Response = struct { bytes_read: c_int = 0, pub fn format(self: Response, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void { - try fmt.format(writer, "< {d} {s}\n", .{ self.status_code, self.status }); + try fmt.format(writer, "< {d} {any}\n", .{ self.status_code, self.status }); for (self.headers) |header| { _ = try writer.write("< \t"); - try fmt.format(writer, "{s}\n", .{header}); + try fmt.format(writer, "{any}\n", .{header}); } } @@ -173,7 +173,7 @@ pub const Response = struct { return switch (rc) { -1 => if (comptime Environment.allow_assert) brk: { - Output.debug("Malformed HTTP response:\n{s}", .{buf}); + Output.debug("Malformed HTTP response:\n{any}", .{buf}); break :brk error.Malformed_HTTP_Response; } else error.Malformed_HTTP_Response, -2 => brk: { @@ -215,7 +215,7 @@ test "pico_http: parse response" { std.debug.print("Minor Version: {}\n", .{res.minor_version}); std.debug.print("Status Code: {}\n", .{res.status_code}); - std.debug.print("Status: {s}\n", .{res.status}); + std.debug.print("Status: {any}\n", .{res.status}); for (res.headers) |header| { std.debug.print("{}\n", .{header}); @@ -227,7 +227,7 @@ pub const Headers = struct { pub fn format(self: Headers, comptime _: []const u8, _: fmt.FormatOptions, writer: anytype) !void { for (self.headers) |header| { - try fmt.format(writer, "{s}: {s}\r\n", .{ header.name, header.value }); + try fmt.format(writer, "{any}: {any}\r\n", .{ header.name, header.value }); } } diff --git a/src/deps/zig-clap/clap.zig b/src/deps/zig-clap/clap.zig index eb1a5e4520988e..17ce0c7da2a5f1 100644 --- a/src/deps/zig-clap/clap.zig +++ b/src/deps/zig-clap/clap.zig @@ -221,13 +221,13 @@ pub const Diagnostic = struct { Arg{ .prefix = "", .name = diag.arg }; switch (err) { - error.DoesntTakeValue => try stream.print("The argument '{s}{s}' does not take a value\n", .{ a.prefix, a.name }), - error.MissingValue => try stream.print("The argument '{s}{s}' requires a value but none was supplied\n", .{ a.prefix, a.name }), + error.DoesntTakeValue => try stream.print("The argument '{any}{any}' does not take a value\n", .{ a.prefix, a.name }), + error.MissingValue => try stream.print("The argument '{any}{any}' requires a value but none was supplied\n", .{ a.prefix, a.name }), error.InvalidArgument => if (a.prefix.len > 0 and a.name.len > 0) - try stream.print("Invalid argument '{s}{s}'\n", .{ a.prefix, 
a.name }) + try stream.print("Invalid argument '{any}{any}'\n", .{ a.prefix, a.name }) else try stream.print("Failed to parse argument due to unexpected single dash\n", .{}), - else => try stream.print("Error while parsing arguments: {s}\n", .{@errorName(err)}), + else => try stream.print("Error while parsing arguments: {any}\n", .{@errorName(err)}), } } }; @@ -357,7 +357,7 @@ pub fn helpFull( try stream.print("\t", .{}); try printParam(cs.writer(), Id, param, Error, context, valueText); try stream.writeByteNTimes(' ', max_spacing - @intCast(usize, cs.bytes_written)); - try stream.print("\t{s}\n", .{try helpText(context, param)}); + try stream.print("\t{any}\n", .{try helpText(context, param)}); } } @@ -381,14 +381,14 @@ fn printParam( try stream.print(" ", .{}); } - try stream.print("--{s}", .{l}); + try stream.print("--{any}", .{l}); } switch (param.takes_value) { .none => {}, - .one => try stream.print(" <{s}>", .{valueText(context, param)}), - .one_optional => try stream.print(" <{s}>?", .{valueText(context, param)}), - .many => try stream.print(" <{s}>...", .{valueText(context, param)}), + .one => try stream.print(" <{any}>", .{valueText(context, param)}), + .one_optional => try stream.print(" <{any}>?", .{valueText(context, param)}), + .many => try stream.print(" <{any}>...", .{valueText(context, param)}), } } @@ -489,12 +489,12 @@ pub fn usageFull( if (cos.bytes_written != 0) try cs.writeByte(' '); - try cs.print("[{s}{s}", .{ prefix, name }); + try cs.print("[{any}{any}", .{ prefix, name }); switch (param.takes_value) { .none => {}, - .one => try cs.print(" <{s}>", .{try valueText(context, param)}), - .one_optional => try cs.print(" <{s}>?", .{try valueText(context, param)}), - .many => try cs.print(" <{s}>...", .{try valueText(context, param)}), + .one => try cs.print(" <{any}>", .{try valueText(context, param)}), + .one_optional => try cs.print(" <{any}>?", .{try valueText(context, param)}), + .many => try cs.print(" <{any}>...", .{try valueText(context, param)}), } try cs.writeByte(']'); @@ -503,7 +503,7 @@ pub fn usageFull( if (positional) |p| { if (cos.bytes_written != 0) try cs.writeByte(' '); - try cs.print("<{s}>", .{try valueText(context, p)}); + try cs.print("<{any}>", .{try valueText(context, p)}); } } diff --git a/src/deps/zig-datetime/src/datetime.zig b/src/deps/zig-datetime/src/datetime.zig index 578356fa815ec4..fa74d5a8a309af 100644 --- a/src/deps/zig-datetime/src/datetime.zig +++ b/src/deps/zig-datetime/src/datetime.zig @@ -1347,7 +1347,7 @@ pub const Datetime = struct { // Formats a timestamp in the format used by HTTP. 
// eg "Tue, 15 Nov 1994 08:12:31 GMT" pub fn formatHttp(self: Datetime, allocator: Allocator) ![]const u8 { - return try std.fmt.allocPrint(allocator, "{s}, {d} {s} {d} {d:0>2}:{d:0>2}:{d:0>2} GMT", .{ + return try std.fmt.allocPrint(allocator, "{any}, {d} {any} {d} {d:0>2}:{d:0>2}:{d:0>2} GMT", .{ self.date.weekdayName()[0..3], self.date.day, self.date.monthName()[0..3], @@ -1359,7 +1359,7 @@ pub const Datetime = struct { } pub fn formatHttpBuf(self: Datetime, buf: []u8) ![]const u8 { - return try std.fmt.bufPrint(buf, "{s}, {d} {s} {d} {d:0>2}:{d:0>2}:{d:0>2} GMT", .{ + return try std.fmt.bufPrint(buf, "{any}, {d} {any} {d} {d:0>2}:{d:0>2}:{d:0>2} GMT", .{ self.date.weekdayName()[0..3], self.date.day, self.date.monthName()[0..3], @@ -1551,7 +1551,7 @@ test "file-modified-date" { var stat = try f.stat(); var buf: [32]u8 = undefined; var str = try Datetime.formatHttpFromModifiedDate(&buf, stat.mtime); - std.log.warn("Modtime: {s}\n", .{str}); + std.log.warn("Modtime: {any}\n", .{str}); } test "readme-example" { @@ -1566,7 +1566,7 @@ test "readme-example" { const now = Datetime.now(); const now_str = try now.formatHttp(allocator); defer allocator.free(now_str); - std.log.warn("The time is now: {s}\n", .{now_str}); + std.log.warn("The time is now: {any}\n", .{now_str}); // The time is now: Fri, 20 Dec 2019 22:03:02 UTC } diff --git a/src/env_loader.zig b/src/env_loader.zig index 4a37a586fa868c..5363a443e1ad0c 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -534,7 +534,7 @@ pub const Loader = struct { const value: string = entry.value_ptr.*; if (strings.startsWith(entry.key_ptr.*, prefix)) { - const key_str = std.fmt.allocPrint(key_allocator, "process.env.{s}", .{entry.key_ptr.*}) catch unreachable; + const key_str = std.fmt.allocPrint(key_allocator, "process.env.{any}", .{entry.key_ptr.*}) catch unreachable; e_strings[0] = js_ast.E.String{ .data = if (value.len > 0) @@ -583,7 +583,7 @@ pub const Loader = struct { } else { while (iter.next()) |entry| { const value: string = if (entry.value_ptr.*.len == 0) empty_string_value else entry.value_ptr.*; - const key = std.fmt.allocPrint(key_allocator, "process.env.{s}", .{entry.key_ptr.*}) catch unreachable; + const key = std.fmt.allocPrint(key_allocator, "process.env.{any}", .{entry.key_ptr.*}) catch unreachable; e_strings[0] = js_ast.E.String{ .data = if (entry.value_ptr.*.len > 0) @@ -720,9 +720,9 @@ pub const Loader = struct { if (yes) { loaded_i += 1; if (count == 1 or (loaded_i >= count and count > 1)) { - Output.prettyError("\"{s}\"", .{all[i]}); + Output.prettyError("\"{any}\"", .{all[i]}); } else { - Output.prettyError("\"{s}\", ", .{all[i]}); + Output.prettyError("\"{any}\", ", .{all[i]}); } } } @@ -744,7 +744,7 @@ pub const Loader = struct { }, error.FileBusy, error.DeviceBusy, error.AccessDenied, error.IsDir => { if (!this.quiet) { - Output.prettyErrorln("{s} error loading {s} file", .{ @errorName(err), base }); + Output.prettyErrorln("{any} error loading {any} file", .{ @errorName(err), base }); } // prevent retrying diff --git a/src/fs.zig b/src/fs.zig index 9cc2a3170e9fb9..0dbb28a31d98e1 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -67,7 +67,7 @@ pub const BytecodeCacheFetcher = struct { this.fd = @truncate(StoredFileDescriptorType, cache_file.handle); return @truncate(StoredFileDescriptorType, cache_file.handle); } else |err| { - Output.prettyWarnln("Warn: Bytecode caching unavailable due to error: {s}", .{@errorName(err)}); + Output.prettyWarnln("Warn: Bytecode caching unavailable due to error: {any}", 
.{@errorName(err)}); Output.flush(); this.fd = 0; return null; @@ -117,7 +117,7 @@ pub const FileSystem = struct { pub fn tmpname(_: *const FileSystem, extname: string, buf: []u8, hash: u64) ![*:0]u8 { // PRNG was...not so random - return try std.fmt.bufPrintZ(buf, ".{x}{s}", .{ @truncate(u64, @intCast(u128, hash) * @intCast(u128, std.time.nanoTimestamp())), extname }); + return try std.fmt.bufPrintZ(buf, ".{x}{any}", .{ @truncate(u64, @intCast(u128, hash) * @intCast(u128, std.time.nanoTimestamp())), extname }); } pub var max_fd: FileDescriptorType = 0; @@ -256,16 +256,16 @@ pub const FileSystem = struct { if (comptime FeatureFlags.verbose_fs) { if (_kind == .dir) { - Output.prettyln(" + {s}/", .{stored_name}); + Output.prettyln(" + {any}/", .{stored_name}); } else { - Output.prettyln(" + {s}", .{stored_name}); + Output.prettyln(" + {any}", .{stored_name}); } } } pub fn init(dir: string) DirEntry { if (comptime FeatureFlags.verbose_fs) { - Output.prettyln("\n {s}", .{dir}); + Output.prettyln("\n {any}", .{dir}); } return DirEntry{ .dir = dir, .data = EntryMap{} }; @@ -729,7 +729,7 @@ pub const FileSystem = struct { ) !string { return try std.fmt.bufPrint( &hash_name_buf, - "{s}-{x}", + "{any}-{x}", .{ basename, this.hash(), @@ -1285,7 +1285,7 @@ pub const Path = struct { } pub fn generateKey(p: *Path, allocator: std.mem.Allocator) !string { - return try std.fmt.allocPrint(allocator, "{s}://{s}", .{ p.namespace, p.text }); + return try std.fmt.allocPrint(allocator, "{any}://{any}", .{ p.namespace, p.text }); } pub fn init(text: string) Path { diff --git a/src/http.zig b/src/http.zig index 41e3f8af30f178..d2d86121e74f77 100644 --- a/src/http.zig +++ b/src/http.zig @@ -238,14 +238,14 @@ pub const RequestContext = struct { display_port = "80"; } } - this.origin = ZigURL.parse(std.fmt.allocPrint(this.allocator, "{s}://{s}:{s}/", .{ display_protocol, display_host, display_port }) catch unreachable); + this.origin = ZigURL.parse(std.fmt.allocPrint(this.allocator, "{any}://{any}:{any}/", .{ display_protocol, display_host, display_port }) catch unreachable); } } pub fn getFullURL(this: *RequestContext) [:0]const u8 { if (this.full_url.len == 0) { if (this.origin.isAbsolute()) { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}", .{ this.origin.origin, this.request.path }) catch unreachable; + this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}{any}", .{ this.origin.origin, this.request.path }) catch unreachable; } else { this.full_url = this.allocator.dupeZ(u8, this.request.path) catch unreachable; } @@ -257,9 +257,9 @@ pub const RequestContext = struct { pub fn getFullURLForSourceMap(this: *RequestContext) [:0]const u8 { if (this.full_url.len == 0) { if (this.origin.isAbsolute()) { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}.map", .{ this.origin.origin, this.request.path }) catch unreachable; + this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}{any}.map", .{ this.origin.origin, this.request.path }) catch unreachable; } else { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}.map", .{this.request.path}) catch unreachable; + this.full_url = std.fmt.allocPrintZ(this.allocator, "{any}.map", .{this.request.path}) catch unreachable; } } @@ -378,7 +378,7 @@ pub const RequestContext = struct { if (this.bundler.options.node_modules_bundle_url.len > 0) { add_preload: { - const node_modules_preload_header_value = std.fmt.bufPrint(remaining, "<{s}>; rel=modulepreload", .{ + const node_modules_preload_header_value = std.fmt.bufPrint(remaining, "<{any}>; 
rel=modulepreload", .{ this.bundler.options.node_modules_bundle_url, }) catch break :add_preload; @@ -480,8 +480,8 @@ pub const RequestContext = struct { // On Windows, we don't keep the directory handle open forever because Windows doesn't like that. const public_dir: std.fs.Dir = this.bundler.options.routes.static_dir_handle orelse std.fs.openDirAbsolute(this.bundler.options.routes.static_dir, .{}) catch |err| { - this.bundler.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable; - Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)}); + this.bundler.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, "Opening public directory failed: {any}", .{@errorName(err)}) catch unreachable; + Output.printErrorln("Opening public directory failed: {any}", .{@errorName(err)}); this.bundler.options.routes.static_dir_enabled = false; return null; }; @@ -614,11 +614,11 @@ pub const RequestContext = struct { else => @compileError("Invalid code passed to printStatusLine"), }; - return std.fmt.comptimePrint("HTTP/1.1 {d} {s}\r\n", .{ code, status_text }); + return std.fmt.comptimePrint("HTTP/1.1 {d} {any}\r\n", .{ code, status_text }); } pub fn printStatusLineError(err: anyerror, buf: []u8) []const u8 { - return std.fmt.bufPrint(buf, "HTTP/1.1 500 {s}\r\n", .{@errorName(err)}) catch unreachable; + return std.fmt.bufPrint(buf, "HTTP/1.1 500 {any}\r\n", .{@errorName(err)}) catch unreachable; } pub fn prepareToSendBody( @@ -685,7 +685,7 @@ pub const RequestContext = struct { return error.SocketClosed; } - Output.prettyErrorln("send() error: {s}", .{err.toSystemError().message.slice()}); + Output.prettyErrorln("send() error: {any}", .{err.toSystemError().message.slice()}); return erro; }, @@ -718,12 +718,12 @@ pub const RequestContext = struct { ctx.status = @as(HTTPStatusCode, 500); } - threadlocal var status_buf: [std.fmt.count("HTTP/1.1 {d} {s}\r\n", .{ 200, "OK" })]u8 = undefined; + threadlocal var status_buf: [std.fmt.count("HTTP/1.1 {d} {any}\r\n", .{ 200, "OK" })]u8 = undefined; pub fn writeStatusSlow(ctx: *RequestContext, code: u16) !void { _ = try ctx.writeSocket( try std.fmt.bufPrint( &status_buf, - "HTTP/1.1 {d} {s}\r\n", + "HTTP/1.1 {d} {any}\r\n", .{ code, if (code > 299) "HM" else "OK" }, ), SOCKET_FLAGS, @@ -785,9 +785,9 @@ pub const RequestContext = struct { pub fn sendInternalError(ctx: *RequestContext, err: anytype) !void { defer ctx.done(); try ctx.writeStatusError(err); - const printed = std.fmt.bufPrint(&error_buf, "error: {s}\nPlease see your terminal for more details", .{@errorName(err)}) catch |err2| brk: { + const printed = std.fmt.bufPrint(&error_buf, "error: {any}\nPlease see your terminal for more details", .{@errorName(err)}) catch |err2| brk: { if (Environment.isDebug or Environment.isTest) { - Global.panic("error while printing error: {s}", .{@errorName(err2)}); + Global.panic("error while printing error: {any}", .{@errorName(err2)}); } break :brk "Internal error"; @@ -913,7 +913,7 @@ pub const RequestContext = struct { defer ctx.done(); const stats = file.stat() catch |err| { - Output.prettyErrorln("Error {s} reading index.html", .{@errorName(err)}); + Output.prettyErrorln("Error {any} reading index.html", .{@errorName(err)}); ctx.writeStatus(500) catch {}; return; }; @@ -1223,7 +1223,7 @@ pub const RequestContext = struct { step, err, - "JavaScript VM failed to start due to {s}.", + "JavaScript VM failed to start due to {any}.", .{ @errorName(err), }, @@ -1390,7 
+1390,7 @@ pub const RequestContext = struct { handler.handleJSErrorFmt( .load_entry_point, err, - "JavaScript VM failed to start.\n{s}: while loading \"{s}\"", + "JavaScript VM failed to start.\n{any}: while loading \"{any}\"", .{ @errorName(err), entry_point }, ) catch {}; vm.flush(); @@ -1406,7 +1406,7 @@ pub const RequestContext = struct { handler.handleRuntimeJSError( result, .eval_entry_point, - "JavaScript VM failed to start.\nwhile loading \"{s}\"", + "JavaScript VM failed to start.\nwhile loading \"{any}\"", .{entry_point}, ) catch {}; vm.flush(); @@ -1435,7 +1435,7 @@ pub const RequestContext = struct { if (vm.bundler.options.framework.?.display_name.len > 0) { Output.prettyError( - " {s} ready! (powered by bun)\n", + " {any} ready! (powered by bun)\n", .{ vm.bundler.options.framework.?.display_name, }, @@ -1519,7 +1519,7 @@ pub const RequestContext = struct { handler.handleJSErrorFmt( .resolve_entry_point, error.EntryPointDisabled, - "JavaScript VM failed to start due to disabled entry point: \"{s}\"", + "JavaScript VM failed to start due to disabled entry point: \"{any}\"", .{resolved_entry_point.path_pair.primary.text}, ) catch {}; javascript_disabled = true; @@ -1914,7 +1914,7 @@ pub const RequestContext = struct { defer Output.flush(); handler.conn.client.getError() catch |err| { - Output.prettyErrorln("Websocket ERR: {s}", .{err}); + Output.prettyErrorln("Websocket ERR: {any}", .{err}); handler.tombstone = true; is_socket_closed = true; }; @@ -1928,7 +1928,7 @@ pub const RequestContext = struct { continue; }, else => { - Output.prettyErrorln("Websocket ERR: {s}", .{err}); + Output.prettyErrorln("Websocket ERR: {any}", .{err}); }, } return; @@ -1962,7 +1962,7 @@ pub const RequestContext = struct { ); if (Watcher.getHash(file_path) != full_build.id) { - Output.prettyErrorln("ERR: File path hash mismatch for {s}.", .{full_build.file_path}); + Output.prettyErrorln("ERR: File path hash mismatch for {any}.", .{full_build.file_path}); continue; } // save because WebSocket's buffer is 8096 @@ -1971,7 +1971,7 @@ pub const RequestContext = struct { path_buf.ptr[path_buf.len] = 0; var file_path_z: [:0]u8 = path_buf.ptr[0..path_buf.len :0]; const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .mode = .read_only }) catch |err| { - Output.prettyErrorln("ERR:{s} opening file {s} ", .{ @errorName(err), full_build.file_path }); + Output.prettyErrorln("ERR:{any} opening file {any} ", .{ @errorName(err), full_build.file_path }); continue; }; Fs.FileSystem.setMaxFd(file.handle); @@ -2040,7 +2040,7 @@ pub const RequestContext = struct { switch (build_result.value) { .fail => { Output.prettyErrorln( - "error: {s}", + "error: {any}", .{ file_path, }, @@ -2049,7 +2049,7 @@ pub const RequestContext = struct { .success => { if (build_result.timestamp > cmd.timestamp) { Output.prettyErrorln( - "{d}ms built {s} ({d}+ LOC)", + "{d}ms built {any} ({d}+ LOC)", .{ build_result.timestamp - cmd.timestamp, file_path, @@ -2123,7 +2123,7 @@ pub const RequestContext = struct { _ = try handler.websocket.writeDataFrame(pong); }, else => { - Output.prettyErrorln("Websocket unknown opcode: {s}", .{@tagName(frame.header.opcode)}); + Output.prettyErrorln("Websocket unknown opcode: {any}", .{@tagName(frame.header.opcode)}); }, } } @@ -2162,7 +2162,7 @@ pub const RequestContext = struct { }; // this error is noisy // return std.fmt.parseInt(u8, v, 10) catch { - // Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Version is invalid {s}", .{v}); + // Output.prettyErrorln("HMR WebSocket error: 
Sec-WebSocket-Version is invalid {any}", .{v}); // return error.BadRequest; // }; } @@ -2173,7 +2173,7 @@ pub const RequestContext = struct { var request: *RequestContext = &self.ctx; const key = (request.header("Sec-WebSocket-Key") orelse return error.BadRequest); if (key.len < 8) { - Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Key is less than 8 characters long: {s}", .{key}); + Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Key is less than 8 characters long: {any}", .{key}); return error.BadRequest; } @@ -2584,7 +2584,7 @@ pub const RequestContext = struct { )) { if (ctx.watcher.watchloop_handle == null) { ctx.watcher.start() catch |err| { - Output.prettyErrorln("Failed to start watcher: {s}", .{@errorName(err)}); + Output.prettyErrorln("Failed to start watcher: {any}", .{@errorName(err)}); }; } } else |_| {} @@ -2982,7 +2982,7 @@ pub const RequestContext = struct { if (editor != .none) { editor.open(http_editor_context.path, path.text, line, column, bun.default_allocator) catch |err| { if (editor != .other) { - Output.prettyErrorln("Error {s} opening in {s}", .{ @errorName(err), @tagName(editor) }); + Output.prettyErrorln("Error {any} opening in {any}", .{ @errorName(err), @tagName(editor) }); } http_editor_context.editor = Editor.none; @@ -3006,7 +3006,7 @@ pub const RequestContext = struct { resolve_result.file_fd else brk: { var file = std.fs.openFileAbsoluteZ(path.textZ(), .{ .mode = .read_only }) catch |err| { - Output.prettyErrorln("Failed to open {s} due to error {s}", .{ path.text, @errorName(err) }); + Output.prettyErrorln("Failed to open {any} due to error {any}", .{ path.text, @errorName(err) }); return try ctx.sendInternalError(err); }; needs_close = true; @@ -3021,7 +3021,7 @@ pub const RequestContext = struct { const content_length = brk: { var file = std.fs.File{ .handle = fd }; var stat = file.stat() catch |err| { - Output.prettyErrorln("Failed to read {s} due to error {s}", .{ path.text, @errorName(err) }); + Output.prettyErrorln("Failed to read {any} due to error {any}", .{ path.text, @errorName(err) }); return try ctx.sendInternalError(err); }; break :brk stat.size; @@ -3314,7 +3314,7 @@ pub const Server = struct { var hinted_content_fbs = std.io.fixedBufferStream(filechange_buf_hinted[header.len..]); if (comptime Environment.isDebug) { - Output.prettyErrorln("[watcher] {s}: -- {}", .{ @tagName(kind), event.op }); + Output.prettyErrorln("[watcher] {any}: -- {}", .{ @tagName(kind), event.op }); } switch (kind) { @@ -3328,7 +3328,7 @@ pub const Server = struct { ); if (comptime FeatureFlags.verbose_watcher) { - Output.prettyErrorln("File changed: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); + Output.prettyErrorln("File changed: {any}", .{ctx.bundler.fs.relativeTo(file_path)}); } } else { const change_message = Api.WebsocketMessageFileChangeNotification{ @@ -3341,12 +3341,12 @@ pub const Server = struct { const change_buf = content_fbs.getWritten(); const written_buf = filechange_buf[0 .. 
header.len + change_buf.len]; RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| { - Output.prettyErrorln("Error writing change notification: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error writing change notification: {any}", .{@errorName(err)}); }; if (comptime is_emoji_enabled) { - Output.prettyErrorln("📜 File change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); + Output.prettyErrorln("📜 File change: {any}", .{ctx.bundler.fs.relativeTo(file_path)}); } else { - Output.prettyErrorln(" File change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); + Output.prettyErrorln(" File change: {any}", .{ctx.bundler.fs.relativeTo(file_path)}); } } }, @@ -3411,12 +3411,12 @@ pub const Server = struct { const change_buf = hinted_content_fbs.getWritten(); const written_buf = filechange_buf_hinted[0 .. header.len + change_buf.len]; RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| { - Output.prettyErrorln("Error writing change notification: {s}", .{@errorName(err)}); + Output.prettyErrorln("Error writing change notification: {any}", .{@errorName(err)}); }; if (comptime is_emoji_enabled) { - Output.prettyErrorln("📜 File change: {s}", .{ctx.bundler.fs.relativeTo(abs_path)}); + Output.prettyErrorln("📜 File change: {any}", .{ctx.bundler.fs.relativeTo(abs_path)}); } else { - Output.prettyErrorln(" File change: {s}", .{ctx.bundler.fs.relativeTo(abs_path)}); + Output.prettyErrorln(" File change: {any}", .{ctx.bundler.fs.relativeTo(abs_path)}); } } } @@ -3425,9 +3425,9 @@ pub const Server = struct { // if (event.op.delete or event.op.rename) // ctx.watcher.removeAtIndex(event.index, hashes[event.index], parent_hashes, .directory); if (comptime is_emoji_enabled) { - Output.prettyErrorln("📁 Dir change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); + Output.prettyErrorln("📁 Dir change: {any}", .{ctx.bundler.fs.relativeTo(file_path)}); } else { - Output.prettyErrorln(" Dir change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); + Output.prettyErrorln(" Dir change: {any}", .{ctx.bundler.fs.relativeTo(file_path)}); } }, } @@ -3480,7 +3480,7 @@ pub const Server = struct { continue :restart; }, else => { - Output.prettyErrorln("{s} while trying to start listening on port {d}.\n\n", .{ @errorName(err), port }); + Output.prettyErrorln("{any} while trying to start listening on port {d}.\n\n", .{ @errorName(err), port }); Global.exit(1); }, } @@ -3537,7 +3537,7 @@ pub const Server = struct { if (std.mem.readIntNative(u32, &addr.ipv4.host.octets) == 0 or std.mem.readIntNative(u128, &addr.ipv6.host.octets) == 0) { if (server.bundler.options.routes.single_page_app_routing) { Output.prettyError( - " bun!! v{s}\n\n\n Link: http://localhost:{d}\n {s}/index.html \n\n\n", + " bun!! v{any}\n\n\n Link: http://localhost:{d}\n {any}/index.html \n\n\n", .{ Global.package_json_version_with_sha, addr.ipv4.port, @@ -3545,20 +3545,20 @@ pub const Server = struct { }, ); } else { - Output.prettyError(" bun!! v{s}\n\n\n Link: http://localhost:{d}\n\n\n", .{ + Output.prettyError(" bun!! v{any}\n\n\n Link: http://localhost:{d}\n\n\n", .{ Global.package_json_version_with_sha, addr.ipv4.port, }); } } else { if (server.bundler.options.routes.single_page_app_routing) { - Output.prettyError(" bun!! v{s}\n\n\n Link: http://{s}\n {s}/index.html \n\n\n", .{ + Output.prettyError(" bun!! v{any}\n\n\n Link: http://{any}\n {any}/index.html \n\n\n", .{ Global.package_json_version_with_sha, addr, display_path, }); } else { - Output.prettyError(" bun!! 
v{s}\n\n\n Link: http://{s}\n\n\n", .{ + Output.prettyError(" bun!! v{any}\n\n\n Link: http://{any}\n\n\n", .{ Global.package_json_version_with_sha, addr, }); @@ -3641,7 +3641,7 @@ pub const Server = struct { var req = picohttp.Request.parse(req_buf_node.data[0..read_size], &req_headers_buf) catch |err| { _ = conn.client.write(RequestContext.printStatusLine(400) ++ "\r\n\r\n", SOCKET_FLAGS) catch {}; _ = Syscall.close(conn.client.socket.fd); - Output.printErrorln("ERR: {s}", .{@errorName(err)}); + Output.printErrorln("ERR: {any}", .{@errorName(err)}); return; }; @@ -3657,7 +3657,7 @@ pub const Server = struct { server.watcher, server.timer, ) catch |err| { - Output.prettyErrorln("[{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.prettyErrorln("[{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); _ = Syscall.close(conn.client.socket.fd); request_arena.deinit(); return; @@ -3682,7 +3682,7 @@ pub const Server = struct { if (req_ctx.url.needs_redirect) { req_ctx.handleRedirect(req_ctx.url.path) catch |err| { - Output.prettyErrorln("[{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.prettyErrorln("[{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); conn.client.deinit(); return; }; @@ -3726,13 +3726,13 @@ pub const Server = struct { 200, 304, 101 => {}, 201...303, 305...399 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, 400...499 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, else => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, } } @@ -3748,13 +3748,13 @@ pub const Server = struct { 200, 304, 101 => {}, 201...303, 305...399 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, 400...499 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, else => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); + Output.prettyErrorln("{d} {any} {any} as {any}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); }, } } @@ -3772,7 +3772,7 @@ pub const Server = struct { } var finished = req_ctx.handleReservedRoutes(server) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; return; }; @@ -3783,7 +3783,7 @@ pub const Server = struct { if (comptime features.single_page_app_routing) { if (req_ctx.url.isRoot(server.bundler.options.routes.asset_prefix_path)) { 
req_ctx.sendSinglePageHTML() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }; finished = true; @@ -3798,7 +3798,7 @@ pub const Server = struct { if (req_ctx.matchPublicFolder(comptime features.public_folder == .last or features.single_page_app_routing)) |result| { finished = true; req_ctx.renderServeResult(result) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; return; }; @@ -3816,7 +3816,7 @@ pub const Server = struct { switch (err) { error.ModuleNotFound => {}, else => { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }, } @@ -3832,7 +3832,7 @@ pub const Server = struct { break :request_handler; }, else => { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }, } @@ -3847,7 +3847,7 @@ pub const Server = struct { if (req_ctx.matchPublicFolder(false)) |result| { finished = true; req_ctx.renderServeResult(result) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }; } @@ -3860,7 +3860,7 @@ pub const Server = struct { if (!finished and (req_ctx.bundler.options.routes.single_page_app_routing and req_ctx.url.extname.len == 0)) { if (!finished) { req_ctx.sendSinglePageHTML() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }; } @@ -3872,7 +3872,7 @@ pub const Server = struct { // if we're about to 404 and it's the favicon, use our stand-in if (strings.eqlComptime(req_ctx.url.path, "favicon.ico")) { req_ctx.sendFavicon() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); + Output.printErrorln("FAIL [{any}] - {any}: {any}", .{ @errorName(err), req.method, req.path }); did_print = true; }; return; diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index 24aeaf4721e8e7..2d7ddf47b7796b 100644 --- a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -51,8 +51,8 @@ fn buildRequestBody(vm: *JSC.VirtualMachine, pathname: *const JSC.ZigString, hos const pico_headers = PicoHTTP.Headers{ .headers = headers_ }; return try std.fmt.allocPrint( allocator, - "GET {s} HTTP/1.1\r\n" ++ - "Host: {s}\r\n" ++ + "GET {any} HTTP/1.1\r\n" ++ + "Host: {any}\r\n" ++ "Pragma: no-cache\r\n" ++ "Cache-Control: no-cache\r\n" ++ "Connection: Upgrade\r\n" ++ @@ -978,7 +978,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { var header_bytes: [@sizeOf(usize)]u8 = [_]u8{0} ** @sizeOf(usize); while (true) { - log("onData ({s})", .{@tagName(receive_state)}); + log("onData ({any})", .{@tagName(receive_state)}); switch (receive_state) { // 0 1 2 3 diff --git a/src/http_client_async.zig b/src/http_client_async.zig index 
669257e73b9048..1e50199d3db790 100644 --- a/src/http_client_async.zig +++ b/src/http_client_async.zig @@ -128,7 +128,7 @@ fn NewHTTPContext(comptime ssl: bool) type { pending.hostname_len = @truncate(u8, hostname.len); pending.port = port; - log("- Keep-Alive release {s}:{d}", .{ hostname, port }); + log("- Keep-Alive release {any}:{d}", .{ hostname, port }); return; } } @@ -293,7 +293,7 @@ fn NewHTTPContext(comptime ssl: bool) type { continue; } - log("+ Keep-Alive reuse {s}:{d}", .{ hostname, port }); + log("+ Keep-Alive reuse {any}:{d}", .{ hostname, port }); return http_socket; } } @@ -480,7 +480,7 @@ pub fn onOpen( std.debug.assert(is_ssl == client.url.isHTTPS()); } - log("Connected {s} \n", .{client.url.href}); + log("Connected {any} \n", .{client.url.href}); if (comptime is_ssl) { var ssl: *BoringSSL.SSL = @ptrCast(*BoringSSL.SSL, socket.getNativeHandle()); @@ -512,7 +512,7 @@ pub fn onClose( comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { - log("Closed {s}\n", .{client.url.href}); + log("Closed {any}\n", .{client.url.href}); const in_progress = client.state.stage != .done and client.state.stage != .fail; @@ -545,7 +545,7 @@ pub fn onTimeout( socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { _ = socket; - log("Timeout {s}\n", .{client.url.href}); + log("Timeout {any}\n", .{client.url.href}); if (client.state.stage != .done and client.state.stage != .fail) client.fail(error.Timeout); @@ -556,7 +556,7 @@ pub fn onConnectError( socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { _ = socket; - log("onConnectError {s}\n", .{client.url.href}); + log("onConnectError {any}\n", .{client.url.href}); if (client.state.stage != .done and client.state.stage != .fail) client.fail(error.ConnectionRefused); @@ -566,7 +566,7 @@ pub fn onEnd( comptime is_ssl: bool, _: NewHTTPContext(is_ssl).HTTPSocket, ) void { - log("onEnd {s}\n", .{client.url.href}); + log("onEnd {any}\n", .{client.url.href}); if (client.state.stage != .done and client.state.stage != .fail) client.fail(error.ConnectionClosed); @@ -1893,7 +1893,7 @@ pub fn handleResponseMetadata( const original_url = this.url; this.url = URL.parse(std.fmt.bufPrint( &url_buf.data, - "{s}://{s}{s}", + "{any}://{any}{any}", .{ original_url.displayProtocol(), original_url.displayHostname(), location }, ) catch return error.RedirectURLTooLong); diff --git a/src/io/io_darwin.zig b/src/io/io_darwin.zig index 104da050785dbb..c2160d5d9c61fd 100644 --- a/src/io/io_darwin.zig +++ b/src/io/io_darwin.zig @@ -746,7 +746,7 @@ fn flush(self: *IO, comptime _: @Type(.EnumLiteral)) !void { ); if (new_events_ < 0) { - return std.debug.panic("kevent() failed {s}", .{@tagName(std.c.getErrno(new_events_))}); + return std.debug.panic("kevent() failed {any}", .{@tagName(std.c.getErrno(new_events_))}); } const new_events = @intCast(usize, new_events_); diff --git a/src/js_ast.zig b/src/js_ast.zig index 72900b8ff784ee..7eede039c0dac9 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -4872,7 +4872,7 @@ pub const Macro = struct { source, import_range, log.msgs.allocator, - "Macro \"{s}\" not found", + "Macro \"{any}\" not found", .{import_record_path}, .stmt, err, @@ -4884,7 +4884,7 @@ pub const Macro = struct { source, import_range, log.msgs.allocator, - "{s} resolving macro \"{s}\"", + "{any} resolving macro \"{any}\"", .{ @errorName(err), import_record_path }, ) catch unreachable; return err; @@ -5547,7 +5547,7 @@ pub const Macro = struct { }, else => { if (comptime Environment.isDebug) { - Output.prettyWarnln("initExpr fail: {s}", 
.{@tagName(this.data)}); + Output.prettyWarnln("initExpr fail: {any}", .{@tagName(this.data)}); } return JSNode{ .loc = this.loc, .data = .{ .e_missing = .{} } }; }, @@ -5957,7 +5957,7 @@ pub const Macro = struct { if (!@hasField(JSNode.Tag, name)) { @compileError( "JSNode.Tag does not have a \"" ++ name ++ "\" field. Valid fields are " ++ std.fmt.comptimePrint( - "{s}", + "{any}", .{ std.meta.fieldNames(@TypeOf(valid_tags)), }, @@ -6709,7 +6709,7 @@ pub const Macro = struct { Tag.e_super, Tag.e_null, Tag.e_undefined, Tag.e_missing, Tag.inline_true, Tag.inline_false, Tag.e_this => { self.args.append(Expr{ .loc = loc, .data = Tag.ids.get(tag) }) catch unreachable; }, - else => Global.panic("Tag \"{s}\" is not implemented yet.", .{@tagName(tag)}), + else => Global.panic("Tag \"{any}\" is not implemented yet.", .{@tagName(tag)}), } return true; @@ -6745,7 +6745,7 @@ pub const Macro = struct { tag_expr.loc, ); }, - else => Global.panic("Not implemented yet top-level jsx element: {s}", .{@tagName(tag_expr.data)}), + else => Global.panic("Not implemented yet top-level jsx element: {any}", .{@tagName(tag_expr.data)}), } } else { const loc = logger.Loc.Empty; @@ -6783,15 +6783,15 @@ pub const Macro = struct { const node_type: JSNode.Tag = JSNode.Tag.names.get(str.data) orelse { if (!str.isUTF8()) { - self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable; + self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable; } else { - self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{str.data}) catch unreachable; + self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{str.data}) catch unreachable; } return false; }; if (!valid_tags.get(node_type)) { - self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid here", .{str.data}) catch unreachable; + self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid here", .{str.data}) catch unreachable; } return self.writeNodeType(node_type, element.properties.slice(), element.children.slice(), tag_expr.loc); @@ -6805,9 +6805,9 @@ pub const Macro = struct { const node_type: JSNode.Tag = JSNode.Tag.names.get(str.data) orelse { if (!str.isUTF8()) { - self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable; + self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{strings.toUTF8Alloc(self.p.allocator, str.slice16())}) catch unreachable; } else { - self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{s}\" is invalid", .{str.data}) catch unreachable; + self.log.addErrorFmt(p.source, tag_expr.loc, p.allocator, "Tag \"{any}\" is invalid", .{str.data}) catch unreachable; } return false; }; @@ -7838,7 +7838,7 @@ pub const Macro = struct { this.source, this.caller.loc, this.allocator, - "cannot coerce {s} to Bun's AST. Please return a valid macro using the JSX syntax", + "cannot coerce {any} to Bun's AST. Please return a valid macro using the JSX syntax", .{@tagName(value.jsType())}, ) catch unreachable; break :brk error.MacroFailed; @@ -8085,7 +8085,7 @@ pub const Macro = struct { this.source, this.caller.loc, this.allocator, - "cannot coerce {s} to Bun's AST. 
Please return a valid macro using the JSX syntax", + "cannot coerce {any} to Bun's AST. Please return a valid macro using the JSX syntax", .{@tagName(value.jsType())}, ) catch unreachable; return error.MacroFailed; @@ -8106,7 +8106,7 @@ pub const Macro = struct { visitor: Visitor, javascript_object: JSC.JSValue, ) MacroError!Expr { - if (comptime Environment.isDebug) Output.prettyln("[macro] call {s}", .{function_name}); + if (comptime Environment.isDebug) Output.prettyln("[macro] call {any}", .{function_name}); exception_holder = Zig.ZigException.Holder.init(); expr_nodes_buf[0] = JSNode.initExpr(caller); diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 056e777db13eb7..19591c2a632457 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -196,7 +196,7 @@ fn NewLexer_( pub fn addDefaultError(self: *LexerType, msg: []const u8) !void { @setCold(true); - self.addError(self.start, "{s}", .{msg}, true); + self.addError(self.start, "{any}", .{msg}, true); return Error.SyntaxError; } @@ -816,7 +816,7 @@ fn NewLexer_( } pub fn addUnsupportedSyntaxError(self: *LexerType, msg: []const u8) !void { - self.addError(self.end, "Unsupported syntax: {s}", .{msg}, true); + self.addError(self.end, "Unsupported syntax: {any}", .{msg}, true); return Error.SyntaxError; } @@ -936,7 +936,7 @@ fn NewLexer_( if (!isIdentifier(identifier)) { try lexer.addRangeError( .{ .loc = logger.usize2Loc(lexer.start), .len = @intCast(i32, lexer.end - lexer.start) }, - "Invalid identifier: \"{s}\"", + "Invalid identifier: \"{any}\"", .{result.contents}, true, ); @@ -965,13 +965,13 @@ fn NewLexer_( pub fn expectContextualKeyword(self: *LexerType, comptime keyword: string) !void { if (!self.isContextualKeyword(keyword)) { if (@import("builtin").mode == std.builtin.Mode.Debug) { - self.addError(self.start, "Expected \"{s}\" but found \"{s}\" (token: {s})", .{ + self.addError(self.start, "Expected \"{any}\" but found \"{any}\" (token: {any})", .{ keyword, self.raw(), self.token, }, true); } else { - self.addError(self.start, "Expected \"{s}\" but found \"{s}\"", .{ keyword, self.raw() }, true); + self.addError(self.start, "Expected \"{any}\" but found \"{any}\"", .{ keyword, self.raw() }, true); } return Error.UnexpectedSyntax; } @@ -1743,7 +1743,7 @@ fn NewLexer_( } }; - try lexer.addRangeError(lexer.range(), "Unexpected {s}", .{found}, true); + try lexer.addRangeError(lexer.range(), "Unexpected {any}", .{found}, true); } pub fn raw(self: *LexerType) []const u8 { @@ -1763,7 +1763,7 @@ fn NewLexer_( } }; - try self.addRangeError(self.range(), "Expected {s} but found \"{s}\"", .{ text, found }, true); + try self.addRangeError(self.range(), "Expected {any} but found \"{any}\"", .{ text, found }, true); } fn scanCommentText(lexer: *LexerType) void { @@ -2090,7 +2090,7 @@ fn NewLexer_( lexer.step(); } } else { - try lexer.addSyntaxError(lexer.range().endI(), "Expected identifier after \"{s}\" in namespaced JSX name", .{lexer.raw()}); + try lexer.addSyntaxError(lexer.range().endI(), "Expected identifier after \"{any}\" in namespaced JSX name", .{lexer.raw()}); } } @@ -2329,10 +2329,10 @@ fn NewLexer_( cursor.c = std.fmt.parseInt(i32, number, base) catch |err| brk: { switch (err) { error.InvalidCharacter => { - lexer.addError(lexer.start, "Invalid JSX entity escape: {s}", .{entity}, false); + lexer.addError(lexer.start, "Invalid JSX entity escape: {any}", .{entity}, false); }, error.Overflow => { - lexer.addError(lexer.start, "JSX entity escape is too big: {s}", .{entity}, false); + lexer.addError(lexer.start, "JSX entity escape is 
too big: {any}", .{entity}, false); }, } @@ -2630,7 +2630,7 @@ fn NewLexer_( if (std.fmt.parseFloat(f64, text)) |num| { lexer.number = num; } else |_| { - try lexer.addSyntaxError(lexer.start, "Invalid number {s}", .{text}); + try lexer.addSyntaxError(lexer.start, "Invalid number {any}", .{text}); } } } diff --git a/src/js_parser.zig b/src/js_parser.zig index 015ac960c7fa99..8e87a97532f0fb 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -943,7 +943,7 @@ pub const ImportScanner = struct { try p.import_records_for_current_part.append(allocator, st.import_record_index); for (st.items) |item| { - const ref = item.name.ref orelse p.panic("Expected export from item to have a name {s}", .{st}); + const ref = item.name.ref orelse p.panic("Expected export from item to have a name {any}", .{st}); // Note that the imported alias is not item.Alias, which is the // exported alias. This is somewhat confusing because each // SExportFrom statement is basically SImport + SExportClause in one. @@ -4266,7 +4266,7 @@ fn NewParser_( const symbol_name = p.import_records.items[import_record_index].path.name.nonUniqueNameString(p.allocator); const cjs_import_name = std.fmt.allocPrint( p.allocator, - "{s}_{x}_{d}", + "{any}_{x}_{d}", .{ symbol_name, @truncate( @@ -4520,7 +4520,7 @@ fn NewParser_( unreachable; } - // Output.print("\nStmt: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); + // Output.print("\nStmt: {any} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); if (@typeInfo(Type) == .Pointer) { // ExportFrom normally becomes import records during the visiting pass // However, we skip the visiting pass in this mode @@ -4556,7 +4556,7 @@ fn NewParser_( } } - // Output.print("\nExpr: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); + // Output.print("\nExpr: {any} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); if (@typeInfo(Type) == .Pointer) { if (comptime only_scan_imports_and_do_not_visit) { if (Type == *E.Call) { @@ -4642,7 +4642,7 @@ fn NewParser_( // Forbid referencing "arguments" inside class bodies if (scope.forbid_arguments and !did_forbid_argumen and strings.eqlComptime(name, "arguments")) { const r = js_lexer.rangeOfIdentifier(p.source, loc); - p.log.addRangeErrorFmt(p.source, r, allocator, "Cannot access \"{s}\" here", .{name}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, allocator, "Cannot access \"{any}\" here", .{name}) catch unreachable; did_forbid_argumen = true; } @@ -4697,7 +4697,7 @@ fn NewParser_( } }, else => { - p.panic("Unexpected binding export type {s}", .{binding}); + p.panic("Unexpected binding export type {any}", .{binding}); }, } } @@ -4714,7 +4714,7 @@ fn NewParser_( // Duplicate exports are an error var notes = try p.allocator.alloc(logger.Data, 1); notes[0] = logger.Data{ - .text = try std.fmt.allocPrint(p.allocator, "\"{s}\" was originally exported here", .{alias}), + .text = try std.fmt.allocPrint(p.allocator, "\"{any}\" was originally exported here", .{alias}), .location = logger.Location.init_or_nil(p.source, js_lexer.rangeOfIdentifier(p.source, name.alias_loc)), }; try p.log.addRangeErrorFmtWithNotes( @@ -4722,7 +4722,7 @@ fn NewParser_( js_lexer.rangeOfIdentifier(p.source, loc), p.allocator, notes, - "Multiple exports with the same name \"{s}\"", + "Multiple exports with the same name \"{any}\"", .{std.mem.trim(u8, alias, "\"'")}, ); } else { @@ -4786,7 +4786,7 @@ fn NewParser_( if ((opts.assign_target != .none or opts.is_delete_target) and p.symbols.items[ref.innerIndex()].kind == .import) { // Create an error for assigning to an import 
namespace const r = js_lexer.rangeOfIdentifier(p.source, loc); - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to import \"{s}\"", .{ + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to import \"{any}\"", .{ p.symbols.items[ref.innerIndex()].original_name, }) catch unreachable; } @@ -5180,12 +5180,12 @@ fn NewParser_( r, std.fmt.allocPrint( allocator, - "{s} was originally declared here", + "{any} was originally declared here", .{existing_symbol.original_name}, ) catch unreachable, ); - p.log.addRangeErrorFmtWithNotes(p.source, js_lexer.rangeOfIdentifier(p.source, existing_member_entry.value.loc), allocator, notes, "{s} has already been declared", .{symbol.original_name}) catch unreachable; + p.log.addRangeErrorFmtWithNotes(p.source, js_lexer.rangeOfIdentifier(p.source, existing_member_entry.value.loc), allocator, notes, "{any} has already been declared", .{symbol.original_name}) catch unreachable; } continue :nextMember; @@ -5223,7 +5223,7 @@ fn NewParser_( // Sanity-check that the scopes generated by the first and second passes match if (order.loc.start != loc.start or order.scope.kind != kind) { - p.panic("Expected scope ({s}, {d}) in {s}, found scope ({s}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start }); + p.panic("Expected scope ({any}, {d}) in {any}, found scope ({any}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start }); } p.current_scope = order.scope; @@ -5450,7 +5450,7 @@ fn NewParser_( } if (errors.invalid_expr_after_question) |r| { - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Unexpected {s}", .{p.source.contents[r.loc.i()..r.endI()]}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Unexpected {any}", .{p.source.contents[r.loc.i()..r.endI()]}) catch unreachable; } // if (errors.array_spread_feature) |err| { @@ -6581,7 +6581,7 @@ fn NewParser_( } fn createDefaultName(p: *P, loc: logger.Loc) !js_ast.LocRef { - var identifier = try std.fmt.allocPrint(p.allocator, "{s}_default", .{try p.source.path.name.nonUniqueNameString(p.allocator)}); + var identifier = try std.fmt.allocPrint(p.allocator, "{any}_default", .{try p.source.path.name.nonUniqueNameString(p.allocator)}); const name = js_ast.LocRef{ .loc = loc, .ref = try p.newSymbol(Symbol.Kind.other, identifier) }; @@ -7984,7 +7984,7 @@ fn NewParser_( } } } - // Output.print("\n\nmVALUE {s}:{s}\n", .{ expr, name }); + // Output.print("\n\nmVALUE {any}:{any}\n", .{ expr, name }); try p.lexer.expectOrInsertSemicolon(); return p.s(S.SExpr{ .value = expr }, loc); }, @@ -8253,7 +8253,7 @@ fn NewParser_( } else |_| { const r = p.source.rangeOfString(loc); // TODO: improve error message - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Invalid {s} alias because it contains an unpaired Unicode surrogate (like emoji)", .{kind}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Invalid {any} alias because it contains an unpaired Unicode surrogate (like emoji)", .{kind}); return p.source.textForRange(r); } } @@ -8336,7 +8336,7 @@ fn NewParser_( if (isEvalOrArguments(original_name)) { const r = p.source.rangeOfString(name.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {s} as an identifier here", .{original_name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {any} as an identifier here", .{original_name}); } try items.append(.{ @@ -8380,7 +8380,7 @@ fn NewParser_( // Reject forbidden names if (isEvalOrArguments(original_name)) { const r = 
js_lexer.rangeOfIdentifier(p.source, name.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{any}\" as an identifier here", .{original_name}); } try items.append(js_ast.ClauseItem{ @@ -8433,11 +8433,11 @@ fn NewParser_( return; } - try p.log.addError(p.source, value.loc, comptime std.fmt.comptimePrint("for-{s} loop variables cannot have an initializer", .{loop_type})); + try p.log.addError(p.source, value.loc, comptime std.fmt.comptimePrint("for-{any} loop variables cannot have an initializer", .{loop_type})); } }, else => { - try p.log.addError(p.source, decls[0].binding.loc, comptime std.fmt.comptimePrint("for-{s} loops must have a single declaration", .{loop_type})); + try p.log.addError(p.source, decls[0].binding.loc, comptime std.fmt.comptimePrint("for-{any} loops must have a single declaration", .{loop_type})); }, } } @@ -8487,7 +8487,7 @@ fn NewParser_( switch (decl.binding.data) { .b_identifier => |ident| { const r = js_lexer.rangeOfIdentifier(p.source, decl.binding.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "The constant \"{s}\" must be initialized", .{p.symbols.items[ident.ref.innerIndex()].original_name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "The constant \"{any}\" must be initialized", .{p.symbols.items[ident.ref.innerIndex()].original_name}); // return;/ }, else => { @@ -9055,7 +9055,7 @@ fn NewParser_( // "export from" statement after all if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) { const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc); - try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true); + try p.lexer.addRangeError(r, "Expected identifier but found \"{any}\"", .{p.source.textForRange(r)}, true); return error.SyntaxError; } @@ -9210,8 +9210,8 @@ fn NewParser_( .with_statement => "With statements", .delete_bare_name => "\"delete\" of a bare identifier", .for_in_var_init => "Variable initializers within for-in loops", - .eval_or_arguments => try std.fmt.allocPrint(p.allocator, "Declarations with the name {s}", .{detail}), - .reserved_word => try std.fmt.allocPrint(p.allocator, "\"{s}\" is a reserved word and", .{detail}), + .eval_or_arguments => try std.fmt.allocPrint(p.allocator, "Declarations with the name {any}", .{detail}), + .reserved_word => try std.fmt.allocPrint(p.allocator, "\"{any}\" is a reserved word and", .{detail}), .legacy_octal_literal => "Legacy octal literals", .legacy_octal_escape => "Legacy octal escape sequences", .if_else_function_stmt => "Function declarations inside if statements", @@ -9241,13 +9241,13 @@ fn NewParser_( else => {}, } if (why.len == 0) { - why = try std.fmt.allocPrint(p.allocator, "This file is implicitly in strict mode because of the \"{s}\" keyword here", .{p.source.textForRange(where)}); + why = try std.fmt.allocPrint(p.allocator, "This file is implicitly in strict mode because of the \"{any}\" keyword here", .{p.source.textForRange(where)}); } var notes = try p.allocator.alloc(logger.Data, 1); notes[0] = logger.rangeData(p.source, where, why); - try p.log.addRangeErrorWithNotes(p.source, r, try std.fmt.allocPrint(p.allocator, "{s} cannot be used in strict mode", .{text}), notes); + try p.log.addRangeErrorWithNotes(p.source, r, try std.fmt.allocPrint(p.allocator, "{any} cannot be used in strict mode", .{text}), notes); } else if 
(!can_be_transformed and p.isStrictModeOutputFormat()) { - try p.log.addRangeError(p.source, r, try std.fmt.allocPrint(p.allocator, "{s} cannot be used with esm due to strict mode", .{text})); + try p.log.addRangeError(p.source, r, try std.fmt.allocPrint(p.allocator, "{any} cannot be used with esm due to strict mode", .{text})); } } @@ -9346,7 +9346,7 @@ fn NewParser_( js_lexer.rangeOfIdentifier(p.source, existing.loc), std.fmt.allocPrint( p.allocator, - "{s} was originally declared here", + "{any} was originally declared here", .{symbol.original_name}, ) catch unreachable, ); @@ -9356,7 +9356,7 @@ fn NewParser_( js_lexer.rangeOfIdentifier(p.source, loc), p.allocator, notes, - "\"{s}\" has already been declared", + "\"{any}\" has already been declared", .{symbol.original_name}, ) catch unreachable; @@ -10291,7 +10291,7 @@ fn NewParser_( .get => { if (func.args.len > 0) { const r = js_lexer.rangeOfIdentifier(p.source, func.args[0].binding.loc); - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Getter {s} must have zero arguments", .{p.keyNameForError(key)}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Getter {any} must have zero arguments", .{p.keyNameForError(key)}) catch unreachable; } }, .set => { @@ -10300,7 +10300,7 @@ fn NewParser_( if (func.args.len > 1) { r = js_lexer.rangeOfIdentifier(p.source, func.args[1].binding.loc); } - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Setter {s} must have exactly 1 argument (there are {d})", .{ p.keyNameForError(key), func.args.len }) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Setter {any} must have exactly 1 argument (there are {d})", .{ p.keyNameForError(key), func.args.len }) catch unreachable; } }, else => {}, @@ -10447,7 +10447,7 @@ fn NewParser_( // Forbid decorators on class constructors if (opts.ts_decorators.len > 0) { - switch ((property.key orelse p.panic("Internal error: Expected property {s} to have a key.", .{property})).data) { + switch ((property.key orelse p.panic("Internal error: Expected property {any} to have a key.", .{property})).data) { .e_string => |str| { if (str.eqlComptime("constructor")) { p.log.addError(p.source, first_decorator_loc, "TypeScript does not allow decorators on class constructors") catch unreachable; @@ -11377,7 +11377,7 @@ fn NewParser_( if (strings.eqlComptime(clause.alias, "default")) { var non_unique_name = record.path.name.nonUniqueNameString(p.allocator) catch unreachable; - clause.original_name = std.fmt.allocPrint(p.allocator, "{s}_default", .{non_unique_name}) catch unreachable; + clause.original_name = std.fmt.allocPrint(p.allocator, "{any}_default", .{non_unique_name}) catch unreachable; record.contains_default_alias = true; } const name_ref = p.declareSymbol(.import, this.loc, clause.original_name) catch unreachable; @@ -11403,13 +11403,13 @@ fn NewParser_( p.log.printForLogLevel( panic_stream.writer(), ) catch unreachable; - Global.panic("{s}", .{panic_buffer[0..panic_stream.pos]}); + Global.panic("{any}", .{panic_buffer[0..panic_stream.pos]}); } pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr { const loc = p.lexer.loc(); const l = @enumToInt(level); - // Output.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) }); + // Output.print("Parse Prefix {any}:{any} @{any} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) }); switch (p.lexer.token) { .t_super => { @@ -11663,7 +11663,7 @@ fn NewParser_( const private = 
value.data.e_index.index.data.e_private_identifier; const name = p.loadNameFromRef(private.ref); const range = logger.Range{ .loc = value.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeErrorFmt(p.source, range, p.allocator, "Deleting the private name \"{s}\" is forbidden", .{name}) catch unreachable; + p.log.addRangeErrorFmt(p.source, range, p.allocator, "Deleting the private name \"{any}\" is forbidden", .{name}) catch unreachable; } } @@ -12517,7 +12517,7 @@ fn NewParser_( const end_tag = try JSXTag.parse(P, p); if (!strings.eql(end_tag.name, tag.name)) { - try p.log.addRangeErrorFmt(p.source, end_tag.range, p.allocator, "Expected closing tag to match opening tag <{s}>", .{ + try p.log.addRangeErrorFmt(p.source, end_tag.range, p.allocator, "Expected closing tag to match opening tag <{any}>", .{ end_tag.name, tag.name, }); @@ -12788,7 +12788,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected type in export default: {s}", .{s2}); + Global.panic("Unexpected type in export default: {any}", .{s2}); }, } }, @@ -12941,7 +12941,7 @@ fn NewParser_( p.log.addError(p.source, expr.loc, "Invalid assignment target") catch unreachable; } - // Output.print("\nVisit: {s} - {d}\n", .{ @tagName(expr.data), expr.loc.start }); + // Output.print("\nVisit: {any} - {d}\n", .{ @tagName(expr.data), expr.loc.start }); switch (expr.data) { .e_null, .e_super, .e_boolean, .e_big_int, .e_reg_exp, .e_undefined => {}, @@ -13014,7 +13014,7 @@ fn NewParser_( // Handle assigning to a constant // if (in.assign_target != .none and p.symbols.items[result.ref.innerIndex()].kind == .cconst) { // const r = js_lexer.rangeOfIdentifier(p.source, expr.loc); - // p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to {s} because it is a constant", .{name}) catch unreachable; + // p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to {any} because it is a constant", .{name}) catch unreachable; // } var original_name: ?string = null; @@ -13709,7 +13709,7 @@ fn NewParser_( p.source, tag.loc, p.allocator, - "<{s} /> is a void element and must not have \"children\"", + "<{any} /> is a void element and must not have \"children\"", .{tag.data.e_string.slice(p.allocator)}, ) catch {}; } @@ -14114,7 +14114,7 @@ fn NewParser_( const kind: Symbol.Kind = p.symbols.items[result.ref.innerIndex()].kind; if (!Symbol.isKindPrivate(kind)) { const r = logger.Range{ .loc = e_.left.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{s}\" must be declared in an enclosing class", .{name}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{any}\" must be declared in an enclosing class", .{name}) catch unreachable; } e_.right = p.visitExpr(e_.right); @@ -14156,7 +14156,7 @@ fn NewParser_( p.source, js_lexer.rangeOfIdentifier(p.source, tag.loc), p.allocator, - "Invalid JSX tag: \"{s}\"", + "Invalid JSX tag: \"{any}\"", .{tag_string}, ) catch unreachable; return expr; @@ -14181,7 +14181,7 @@ fn NewParser_( p.source, js_lexer.rangeOfIdentifier(p.source, tag.loc), p.allocator, - "Invalid JSX tag: \"{s}\"", + "Invalid JSX tag: \"{any}\"", .{tag_string}, ) catch unreachable; return expr; @@ -14554,17 +14554,17 @@ fn NewParser_( var r: logger.Range = undefined; if (!Symbol.isKindPrivate(kind)) { r = logger.Range{ .loc = e_.index.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{s}\" must be declared in an enclosing class", .{name}) catch unreachable; + 
p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{any}\" must be declared in an enclosing class", .{name}) catch unreachable; } else { if (in.assign_target != .none and (kind == .private_method or kind == .private_static_method)) { r = logger.Range{ .loc = e_.index.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to read-only method \"{s}\" will throw", .{name}) catch unreachable; + p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to read-only method \"{any}\" will throw", .{name}) catch unreachable; } else if (in.assign_target != .none and (kind == .private_get or kind == .private_static_get)) { r = logger.Range{ .loc = e_.index.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to getter-only property \"{s}\" will throw", .{name}) catch unreachable; + p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to getter-only property \"{any}\" will throw", .{name}) catch unreachable; } else if (in.assign_target != .replace and (kind == .private_set or kind == .private_static_set)) { r = logger.Range{ .loc = e_.index.loc, .len = @intCast(i32, name.len) }; - p.log.addRangeWarningFmt(p.source, r, p.allocator, "Reading from setter-only property \"{s}\" will throw", .{name}) catch unreachable; + p.log.addRangeWarningFmt(p.source, r, p.allocator, "Reading from setter-only property \"{any}\" will throw", .{name}) catch unreachable; } } @@ -14628,7 +14628,7 @@ fn NewParser_( p.source, r, p.allocator, - "Cannot assign to property on import \"{s}\"", + "Cannot assign to property on import \"{any}\"", .{p.symbols.items[e_.target.data.e_identifier.ref.innerIndex()].original_name}, ) catch unreachable; } @@ -15158,7 +15158,7 @@ fn NewParser_( p.log.addError(p.source, expr.loc, "macro threw exception") catch unreachable; } } else { - p.log.addErrorFmt(p.source, expr.loc, p.allocator, "{s} error in macro", .{@errorName(err)}) catch unreachable; + p.log.addErrorFmt(p.source, expr.loc, p.allocator, "{any} error in macro", .{@errorName(err)}) catch unreachable; } return expr; }; @@ -16001,7 +16001,7 @@ fn NewParser_( // non-local symbols as errors in JavaScript. if (!is_typescript_enabled) { const r = js_lexer.rangeOfIdentifier(p.source, item.name.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{s}\" is not declared in this file", .{name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{any}\" is not declared in this file", .{name}); } continue; } @@ -16022,7 +16022,7 @@ fn NewParser_( // non-local symbols as errors in JavaScript. if (!is_typescript_enabled) { const r = js_lexer.rangeOfIdentifier(p.source, item.name.loc); - try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{s}\" is not declared in this file", .{name}); + try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{any}\" is not declared in this file", .{name}); continue; } continue; @@ -16364,7 +16364,7 @@ fn NewParser_( label.ref = res.ref; if (res.found and !res.is_loop) { const r = js_lexer.rangeOfIdentifier(p.source, stmt.loc); - p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot \"continue\" to label {s}", .{name}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot \"continue\" to label {any}", .{name}) catch unreachable; } } else if (!p.fn_or_arrow_data_visit.is_inside_loop) { const r = js_lexer.rangeOfIdentifier(p.source, stmt.loc); @@ -17242,7 +17242,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected binding type in namespace. 
This is a bug. {s}", .{binding}); + Global.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); }, } } @@ -17632,7 +17632,7 @@ fn NewParser_( // s.Kind = p.selectLocalKind(s.Kind) }, else => { - p.panic("Unexpected stmt in visitForLoopInit: {s}", .{stmt}); + p.panic("Unexpected stmt in visitForLoopInit: {any}", .{stmt}); }, } @@ -17783,7 +17783,7 @@ fn NewParser_( p.source, js_lexer.rangeOfIdentifier(p.source, binding.loc), p.allocator, - "\"{s}\" cannot be bound multiple times in the same parameter list", + "\"{any}\" cannot be bound multiple times in the same parameter list", .{name}, ) catch unreachable; } @@ -17834,7 +17834,7 @@ fn NewParser_( } }, else => { - p.panic("Unexpected binding {s}", .{binding}); + p.panic("Unexpected binding {any}", .{binding}); }, } } @@ -17903,7 +17903,7 @@ fn NewParser_( } const r = js_lexer.rangeOfIdentifier(p.source, loc); - p.log.addRangeErrorFmt(p.source, r, p.allocator, "There is no containing label named \"{s}\"", .{name}) catch unreachable; + p.log.addRangeErrorFmt(p.source, r, p.allocator, "There is no containing label named \"{any}\"", .{name}) catch unreachable; // Allocate an "unbound" symbol var ref = p.newSymbol(.unbound, name) catch unreachable; diff --git a/src/js_printer.zig b/src/js_printer.zig index ad5be8b23ab326..e0da6542f759b9 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -1920,7 +1920,7 @@ pub fn NewPrinter( if (e.func.name) |sym| { p.maybePrintSpace(); - p.printSymbol(sym.ref orelse Global.panic("internal error: expected E.Function's name symbol to have a ref\n{s}", .{e.func})); + p.printSymbol(sym.ref orelse Global.panic("internal error: expected E.Function's name symbol to have a ref\n{any}", .{e.func})); } p.printFunc(e.func); @@ -1940,7 +1940,7 @@ pub fn NewPrinter( if (e.class_name) |name| { p.maybePrintSpace(); p.addSourceMapping(name.loc); - p.printSymbol(name.ref orelse Global.panic("internal error: expected E.Class's name symbol to have a ref\n{s}", .{e})); + p.printSymbol(name.ref orelse Global.panic("internal error: expected E.Class's name symbol to have a ref\n{any}", .{e})); p.maybePrintSpace(); } p.printClass(e.*); @@ -2367,7 +2367,7 @@ pub fn NewPrinter( } }, else => { - // Global.panic("Unexpected expression of type {s}", .{std.meta.activeTag(expr.data}); + // Global.panic("Unexpected expression of type {any}", .{std.meta.activeTag(expr.data}); }, } } @@ -2920,7 +2920,7 @@ pub fn NewPrinter( p.print("}"); }, else => { - Global.panic("Unexpected binding of type {s}", .{binding}); + Global.panic("Unexpected binding of type {any}", .{binding}); }, } } @@ -2949,8 +2949,8 @@ pub fn NewPrinter( .s_function => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); - const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{s}", .{s}); - const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{s}", .{s}); + const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{any}", .{s}); + const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{any}", .{s}); if (s.func.flags.contains(.is_export)) { if (!rewrite_esm_to_cjs) { @@ -3107,7 +3107,7 @@ pub fn NewPrinter( if (class.class.class_name) |name| { p.print("class "); - p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{s}", .{class})); + p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{any}", 
.{class})); } else { p.print("class"); } @@ -3129,7 +3129,7 @@ pub fn NewPrinter( } }, else => { - Global.panic("Internal error: unexpected export default stmt data {s}", .{s}); + Global.panic("Internal error: unexpected export default stmt data {any}", .{s}); }, } }, @@ -3596,7 +3596,7 @@ pub fn NewPrinter( }, .s_label => |s| { p.printIndent(); - p.printSymbol(s.name.ref orelse Global.panic("Internal error: expected label to have a name {s}", .{s})); + p.printSymbol(s.name.ref orelse Global.panic("Internal error: expected label to have a name {any}", .{s})); p.print(":"); p.printBody(s.stmt); }, @@ -4084,7 +4084,7 @@ pub fn NewPrinter( const to_print: []const u8 = if (slice.len > 1024) slice[slice.len - 1024 ..] else slice; if (to_print.len > 0) { - Global.panic("\nvoluntary crash while printing:\n{s}\n---This is a bug. Not your fault.\n", .{to_print}); + Global.panic("\nvoluntary crash while printing:\n{any}\n---This is a bug. Not your fault.\n", .{to_print}); } else { Global.panic("\nvoluntary crash while printing. This is a bug. Not your fault.\n", .{}); } @@ -4316,7 +4316,7 @@ pub fn NewPrinter( // for(;) .s_empty => {}, else => { - Global.panic("Internal error: Unexpected stmt in for loop {s}", .{initSt}); + Global.panic("Internal error: Unexpected stmt in for loop {any}", .{initSt}); }, } } diff --git a/src/jsc.zig b/src/jsc.zig index 07f5d4d26fa906..252b9b8dfd8ae3 100644 --- a/src/jsc.zig +++ b/src/jsc.zig @@ -56,7 +56,7 @@ const Output = @import("./output.zig"); const __jsc_log = Output.scoped(.JSC, true); pub inline fn markBinding(src: std.builtin.SourceLocation) void { if (comptime is_bindgen) unreachable; - __jsc_log("{s} ({s}:{d})", .{ src.fn_name, src.file, src.line }); + __jsc_log("{any} ({any}:{d})", .{ src.fn_name, src.file, src.line }); } pub const Subprocess = @import("./bun.js/api/bun.zig").Subprocess; diff --git a/src/json_parser.zig b/src/json_parser.zig index 9bd7422db97105..eb11d555244618 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -271,7 +271,7 @@ fn JSONLikeParser_( // Warn about duplicate keys if (duplicate_get_or_put.found_existing) { - p.log.addRangeWarningFmt(p.source(), key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{p.lexer.string_literal_slice}) catch unreachable; + p.log.addRangeWarningFmt(p.source(), key_range, p.allocator, "Duplicate key \"{any}\" in object literal", .{p.lexer.string_literal_slice}) catch unreachable; } } @@ -303,7 +303,7 @@ fn JSONLikeParser_( try p.lexer.unexpected(); if (comptime Environment.isDebug) { - std.io.getStdErr().writer().print("\nThis range: {d} - {d} \n{s}", .{ + std.io.getStdErr().writer().print("\nThis range: {any} - {any} \n{any}", .{ p.lexer.range().loc.start, p.lexer.range().end(), p.lexer.range().in(p.lexer.source.contents), @@ -668,7 +668,7 @@ pub fn toAST( @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'"); } }, - else => @compileError(std.fmt.comptimePrint("Unsupported type: {s} - {s}", .{ @tagName(type_info), @typeName(Type) })), + else => @compileError(std.fmt.comptimePrint("Unsupported type: {any} - {any}", .{ @tagName(type_info), @typeName(Type) })), } } @@ -931,7 +931,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { const expr = try ParseJSON(&source, &log, default_allocator); if (log.msgs.items.len > 0) { - Global.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); + Global.panic("--FAIL--\nExpr {any}\nLog: {any}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); } var buffer_writer = 
try js_printer.BufferWriter.init(default_allocator); diff --git a/src/linear_fifo.zig b/src/linear_fifo.zig index 9ce8881eab1cc7..96e213fb05fba2 100644 --- a/src/linear_fifo.zig +++ b/src/linear_fifo.zig @@ -472,7 +472,7 @@ test "LinearFifo(u8, .Dynamic)" { fifo.shrink(0); { - try fifo.writer().print("{s}, {s}!", .{ "Hello", "World" }); + try fifo.writer().print("{any}, {any}!", .{ "Hello", "World" }); var result: [30]u8 = undefined; try testing.expectEqualSlices(u8, "Hello, World!", result[0..fifo.read(&result)]); try testing.expectEqual(@as(usize, 0), fifo.readableLength()); diff --git a/src/linker.zig b/src/linker.zig index 7807d5cd6062eb..61b5488fbf7ef2 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -174,7 +174,7 @@ pub const Linker = struct { pub inline fn nodeModuleBundleImportPath(this: *const ThisLinker, origin: URL) string { if (this.options.platform.isBun()) return "/node_modules.server.bun"; - return std.fmt.allocPrint(this.allocator, "{s}://{}{s}", .{ origin.displayProtocol(), origin.displayHost(), this.options.node_modules_bundle.?.bundle.import_from_name }) catch unreachable; + return std.fmt.allocPrint(this.allocator, "{any}://{}{any}", .{ origin.displayProtocol(), origin.displayHost(), this.options.node_modules_bundle.?.bundle.import_from_name }) catch unreachable; } // pub const Scratch = struct { @@ -509,7 +509,7 @@ pub const Linker = struct { // null, // logger.Loc.Empty, // linker.allocator, - // "New dependency import: \"{s}/{s}\"\nPlease run `bun bun` to update the .bun.", + // "New dependency import: \"{any}/{any}\"\nPlease run `bun bun` to update the .bun.", // .{ // package_json.name, // package_relative_path, @@ -579,7 +579,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Unexpected version \"{s}\" in import specifier \"{s}\". When a package.json is present, please use one of the \"dependencies\" fields in package.json for setting dependency versions", + "Unexpected version \"{any}\" in import specifier \"{any}\". When a package.json is present, please use one of the \"dependencies\" fields in package.json for setting dependency versions", .{ pkg.version, import_record.path.text }, import_record.kind, err, @@ -589,7 +589,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Unexpected version in import specifier \"{s}\". When a package.json is present, please use one of the \"dependencies\" fields in package.json to specify the version", + "Unexpected version in import specifier \"{any}\". 
When a package.json is present, please use one of the \"dependencies\" fields in package.json to specify the version", .{import_record.path.text}, import_record.kind, err, @@ -615,7 +615,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Version \"{s}\" not found for package \"{s}\" (while resolving \"{s}\")", + "Version \"{any}\" not found for package \"{any}\" (while resolving \"{any}\")", .{ pkg.version, package_name, import_record.path.text }, import_record.kind, err, @@ -625,7 +625,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Package version not found: \"{s}\"", + "Package version not found: \"{any}\"", .{import_record.path.text}, import_record.kind, err, @@ -650,7 +650,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Version \"{s}\" not found for package \"{s}\" (while resolving \"{s}\")", + "Version \"{any}\" not found for package \"{any}\" (while resolving \"{any}\")", .{ pkg.version, package_name, import_record.path.text }, import_record.kind, err, @@ -660,7 +660,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Package tag not found: \"{s}\"", + "Package tag not found: \"{any}\"", .{import_record.path.text}, import_record.kind, err, @@ -686,7 +686,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Package not found: \"{s}\" (while resolving \"{s}\")", + "Package not found: \"{any}\" (while resolving \"{any}\")", .{ package_name, import_record.path.text }, import_record.kind, err, @@ -696,7 +696,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Package not found: \"{s}\"", + "Package not found: \"{any}\"", .{package_name}, import_record.kind, err, @@ -725,7 +725,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Could not resolve: \"{s}\". Try setting --platform=\"node\" (after bun build exists)", + "Could not resolve: \"{any}\". Try setting --platform=\"node\" (after bun build exists)", .{import_record.path.text}, import_record.kind, err, @@ -736,7 +736,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?", + "Could not resolve: \"{any}\". 
Maybe you need to \"bun install\"?", .{import_record.path.text}, import_record.kind, err, @@ -748,7 +748,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "Could not resolve: \"{s}\"", + "Could not resolve: \"{any}\"", .{ import_record.path.text, }, @@ -765,7 +765,7 @@ pub const Linker = struct { &result.source, import_record.range, linker.allocator, - "{s} resolving \"{s}\"", + "{any} resolving \"{any}\"", .{ @errorName(err), import_record.path.text, @@ -891,7 +891,7 @@ pub const Linker = struct { return Fs.Path.init(try std.fmt.allocPrint( linker.allocator, // assumption: already starts with "node:" - "{s}/{s}", + "{any}/{any}", .{ strings.withoutTrailingSlash(origin.href), strings.withoutLeadingSlash(source_path), diff --git a/src/logger.zig b/src/logger.zig index 03cbc6c206864b..5db32f05f11e3d 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -272,7 +272,7 @@ pub const Data = struct { try to.writeAll(message_color); } - try std.fmt.format(to, comptime Output.prettyFmt("{s}\n", enable_ansi_colors), .{this.text}); + try std.fmt.format(to, comptime Output.prettyFmt("{any}\n", enable_ansi_colors), .{this.text}); if (this.location) |location| { if (location.line_text) |line_text_| { @@ -358,7 +358,7 @@ pub const Data = struct { } else {} } - try std.fmt.format(to, comptime Output.prettyFmt("{s}", enable_ansi_colors), .{ + try std.fmt.format(to, comptime Output.prettyFmt("{any}", enable_ansi_colors), .{ location.file, }); @@ -518,7 +518,7 @@ pub const Msg = struct { comptime _: bool, ) !void { if (msg.data.location) |location| { - try writer.print("{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{ + try writer.print("{any}: {any}\n{any}\n{any}:{}:{} ({d})", .{ msg.kind.string(), msg.data.text, location.line_text, @@ -528,7 +528,7 @@ pub const Msg = struct { location.offset, }); } else { - try writer.print("{s}: {s}", .{ + try writer.print("{any}: {any}", .{ msg.kind.string(), msg.data.text, }); @@ -536,7 +536,7 @@ pub const Msg = struct { } pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Global.panic)) void { - formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{ + formatterFunc("\n\n{any}: {any}\n{any}\n{any}:{}:{} ({d})", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, diff --git a/src/meta.zig b/src/meta.zig index cd171cc29c80a2..c40f3ce8df3d41 100644 --- a/src/meta.zig +++ b/src/meta.zig @@ -22,5 +22,5 @@ pub fn typeBaseName(comptime fullname: []const u8) []const u8 { const idx = comptime std.mem.lastIndexOf(u8, fullname, "."); const name = if (idx == null) fullname else fullname[(idx.? 
+ 1)..]; - return comptime std.fmt.comptimePrint("{s}", .{name}); + return comptime std.fmt.comptimePrint("{any}", .{name}); } diff --git a/src/napi/napi.zig b/src/napi/napi.zig index dc685ffe1644e1..a1c42424756585 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -1030,10 +1030,10 @@ pub export fn napi_fatal_error(location_ptr: ?[*:0]const u8, location_len: usize const location = napiSpan(location_ptr, location_len); if (location.len > 0) { - bun.Global.panic("napi: {s}\n {s}", .{ message, location }); + bun.Global.panic("napi: {any}\n {any}", .{ message, location }); } - bun.Global.panic("napi: {s}", .{message}); + bun.Global.panic("napi: {any}", .{message}); } pub export fn napi_create_buffer(env: napi_env, length: usize, data: [*]*anyopaque, result: *napi_value) napi_status { var buf = JSC.ExternalBuffer.create(null, @ptrCast([*]u8, data)[0..length], env, null, env.bunVM().allocator) catch { diff --git a/src/network_thread.zig b/src/network_thread.zig index a3baebacb4846a..088c1919c69e48 100644 --- a/src/network_thread.zig +++ b/src/network_thread.zig @@ -90,7 +90,7 @@ pub fn onStartIOThread(waker: AsyncIO.Waker) void { } } - Output.prettyErrorln("error: Failed to initialize network thread: {s}.\nHTTP requests will not work. Please file an issue and run strace().", .{@errorName(err)}); + Output.prettyErrorln("error: Failed to initialize network thread: {any}.\nHTTP requests will not work. Please file an issue and run strace().", .{@errorName(err)}); } Global.exit(1); diff --git a/src/node_module_bundle.zig b/src/node_module_bundle.zig index bf8fdb8dacf1fc..37f9f9984ad75d 100644 --- a/src/node_module_bundle.zig +++ b/src/node_module_bundle.zig @@ -117,8 +117,8 @@ pub const NodeModuleBundle = struct { Output.prettyErrorln( \\Fatal: incorrect package sorting order detected in .bun file.\n \\This is a bug! Please create an issue.\n - \\If this bug blocks you from doing work, for now - \\please avoid having multiple versions of "{s}" in the same bundle.\n + \\If this bug blocks you from doing work, for now + \\please avoid having multiple versions of "{any}" in the same bundle.\n \\\n \\- Jarred" , @@ -174,7 +174,7 @@ pub const NodeModuleBundle = struct { ) !string { return try std.fmt.allocPrint( allocator, - "{x}/{s}", + "{x}/{any}", .{ this.bundle.packages[to.package_id].hash, this.str(to.path), @@ -381,7 +381,7 @@ pub const NodeModuleBundle = struct { const modules = this.bundle.modules[pkg.modules_offset .. 
pkg.modules_offset + pkg.modules_length]; Output.prettyln( - "{s} v{s}", + "{any} v{any}", .{ this.str(pkg.name), this.str(pkg.version) }, ); @@ -396,7 +396,7 @@ pub const NodeModuleBundle = struct { Output.print(indent, .{}); prettySize(module.code.length, size_level, ">"); Output.prettyln( - indent ++ "{s}" ++ std.fs.path.sep_str ++ "{s} [{d}]\n", + indent ++ "{any}" ++ std.fs.path.sep_str ++ "{any} [{d}]\n", .{ this.str(pkg.name), this.str(module.path), diff --git a/src/open.zig b/src/open.zig index 36c4a9a3535be6..ca5a97e8518062 100644 --- a/src/open.zig +++ b/src/open.zig @@ -19,7 +19,7 @@ const opener = switch (@import("builtin").target.os.tag) { pub fn openURL(url: string) !void { if (comptime Environment.isWasi) { - Output.prettyln("-> {s}", .{url}); + Output.prettyln("-> {any}", .{url}); Output.flush(); return; } @@ -257,12 +257,12 @@ pub const Editor = enum(u8) { try file_path_buf_writer.writeAll(file); if (line) |line_| { if (line_.len > 0) { - try file_path_buf_writer.print(":{s}", .{line_}); + try file_path_buf_writer.print(":{any}", .{line_}); if (!editor.isJetBrains()) { if (column) |col| { if (col.len > 0) - try file_path_buf_writer.print(":{s}", .{col}); + try file_path_buf_writer.print(":{any}", .{col}); } } } @@ -281,11 +281,11 @@ pub const Editor = enum(u8) { args_buf[i] = "--line"; i += 1; - try file_path_buf_writer.print("{s}", .{line_}); + try file_path_buf_writer.print("{any}", .{line_}); if (column) |col| { if (col.len > 0) - try file_path_buf_writer.print(":{s}", .{col}); + try file_path_buf_writer.print(":{any}", .{col}); } var line_column = file_path_buf_stream.getWritten()[file_path.len..]; @@ -339,7 +339,7 @@ pub const EditorContext = struct { pub fn openInEditor(this: *EditorContext, editor_: Editor, blob: []const u8, id: string, tmpdir: std.fs.Dir, line: string, column: string) void { _openInEditor(this.path, editor_, blob, id, tmpdir, line, column) catch |err| { if (editor_ != .other) { - Output.prettyErrorln("Error {s} opening in {s}", .{ @errorName(err), @tagName(editor_) }); + Output.prettyErrorln("Error {any} opening in {any}", .{ @errorName(err), @tagName(editor_) }); } this.editor = Editor.none; diff --git a/src/options.zig b/src/options.zig index 24c0cdc2af7443..57c8ee7ce5c7c0 100644 --- a/src/options.zig +++ b/src/options.zig @@ -57,7 +57,7 @@ pub fn validatePath( null, logger.Loc.Empty, allocator, - "{s} resolving external: \"{s}\"", + "{any} resolving external: \"{any}\"", .{ @errorName(err), rel_path }, ) catch unreachable; return ""; @@ -150,7 +150,7 @@ pub const ExternalModules = struct { const path = external; if (strings.indexOfChar(path, '*')) |i| { if (strings.indexOfChar(path[i + 1 .. 
path.len], '*') != null) { - log.addErrorFmt(null, logger.Loc.Empty, allocator, "External path \"{s}\" cannot have more than one \"*\" wildcard", .{external}) catch unreachable; + log.addErrorFmt(null, logger.Loc.Empty, allocator, "External path \"{any}\" cannot have more than one \"*\" wildcard", .{external}) catch unreachable; return result; } @@ -1380,7 +1380,7 @@ pub const BundleOptions = struct { opts.origin = URL.parse( try std.fmt.allocPrint( allocator, - "{s}://localhost:{s}{s}", + "{any}://localhost:{any}{any}", .{ protocol, port, @@ -1416,7 +1416,7 @@ pub const BundleOptions = struct { if (node_modules_bundle_existing) |node_mods| { opts.node_modules_bundle = node_mods; const pretty_path = fs.relativeTo(transform.node_modules_bundle_path.?); - opts.node_modules_bundle_url = try std.fmt.allocPrint(allocator, "{s}{s}", .{ + opts.node_modules_bundle_url = try std.fmt.allocPrint(allocator, "{any}{any}", .{ opts.origin, pretty_path, }); @@ -1427,7 +1427,7 @@ pub const BundleOptions = struct { var bundle_file = std.fs.openFileAbsolute(bundle_path, .{ .mode = .read_write }) catch |err| { Output.disableBuffering(); defer Output.enableBuffering(); - Output.prettyErrorln("error opening \"{s}\": {s}", .{ pretty_path, @errorName(err) }); + Output.prettyErrorln("error opening \"{any}\": {any}", .{ pretty_path, @errorName(err) }); break :load_bundle; }; @@ -1454,7 +1454,7 @@ pub const BundleOptions = struct { const elapsed = @intToFloat(f64, (std.time.nanoTimestamp() - time_start)) / std.time.ns_per_ms; Output.printElapsed(elapsed); Output.prettyErrorln( - " \"{s}\" - {d} modules, {d} packages", + " \"{any}\" - {d} modules, {d} packages", .{ pretty_path, bundle.bundle.modules.len, @@ -1465,7 +1465,7 @@ pub const BundleOptions = struct { } else |err| { Output.disableBuffering(); Output.prettyErrorln( - "error reading \"{s}\": {s}, deleting it so you don't keep seeing this message.", + "error reading \"{any}\": {any}, deleting it so you don't keep seeing this message.", .{ pretty_path, @errorName(err) }, ); bundle_file.close(); @@ -1555,14 +1555,14 @@ pub const BundleOptions = struct { }, error.AccessDenied => { Output.prettyErrorln( - "error: access denied when trying to open directory for static files: \"{s}\".\nPlease re-open bun with access to this folder or pass a different folder via \"--public-dir\". Note: --public-dir is relative to --cwd (or the process' current working directory).\n\nThe public folder is where static assets such as images, fonts, and .html files go.", + "error: access denied when trying to open directory for static files: \"{any}\".\nPlease re-open bun with access to this folder or pass a different folder via \"--public-dir\". Note: --public-dir is relative to --cwd (or the process' current working directory).\n\nThe public folder is where static assets such as images, fonts, and .html files go.", .{opts.routes.static_dir}, ); std.process.exit(1); }, else => { Output.prettyErrorln( - "error: \"{s}\" when accessing public folder: \"{s}\"", + "error: \"{any}\" when accessing public folder: \"{any}\"", .{ @errorName(err), opts.routes.static_dir }, ); std.process.exit(1); @@ -1584,7 +1584,7 @@ pub const BundleOptions = struct { error.FileNotFound => {}, else => { Output.prettyErrorln( - "{s} when trying to open {s}/index.html. single page app routing is disabled.", + "{any} when trying to open {any}/index.html. 
single page app routing is disabled.", .{ @errorName(err), opts.routes.static_dir }, ); }, @@ -1614,7 +1614,7 @@ pub const BundleOptions = struct { error.FileNotFound => {}, else => { Output.prettyErrorln( - "{s} when trying to open {s}/index.html. single page app routing is disabled.", + "{any} when trying to open {any}/index.html. single page app routing is disabled.", .{ @errorName(err), fs.top_level_dir }, ); }, @@ -1671,12 +1671,12 @@ pub const BundleOptions = struct { pub fn openOutputDir(output_dir: string) !std.fs.Dir { return std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{ .iterate = true }) catch brk: { std.fs.cwd().makeDir(output_dir) catch |err| { - Output.printErrorln("error: Unable to mkdir \"{s}\": \"{s}\"", .{ output_dir, @errorName(err) }); + Output.printErrorln("error: Unable to mkdir \"{any}\": \"{any}\"", .{ output_dir, @errorName(err) }); Global.crash(); }; var handle = std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{ .iterate = true }) catch |err2| { - Output.printErrorln("error: Unable to open \"{s}\": \"{s}\"", .{ output_dir, @errorName(err2) }); + Output.printErrorln("error: Unable to open \"{any}\": \"{any}\"", .{ output_dir, @errorName(err2) }); Global.crash(); }; break :brk handle; diff --git a/src/pool.zig b/src/pool.zig index 344e9ca696f1c7..031ec4ba2a241c 100644 --- a/src/pool.zig +++ b/src/pool.zig @@ -192,7 +192,7 @@ pub fn ObjectPool( } } - if (comptime log_allocations) std.io.getStdErr().writeAll(comptime std.fmt.comptimePrint("Allocate {s} - {d} bytes\n", .{ @typeName(Type), @sizeOf(Type) })) catch {}; + if (comptime log_allocations) std.io.getStdErr().writeAll(comptime std.fmt.comptimePrint("Allocate {any} - {d} bytes\n", .{ @typeName(Type), @sizeOf(Type) })) catch {}; var new_node = allocator.create(LinkedList.Node) catch unreachable; new_node.* = LinkedList.Node{ @@ -215,7 +215,7 @@ pub fn ObjectPool( pub fn release(node: *LinkedList.Node) void { if (comptime max_count > 0) { if (data().count >= max_count) { - if (comptime log_allocations) std.io.getStdErr().writeAll(comptime std.fmt.comptimePrint("Free {s} - {d} bytes\n", .{ @typeName(Type), @sizeOf(Type) })) catch {}; + if (comptime log_allocations) std.io.getStdErr().writeAll(comptime std.fmt.comptimePrint("Free {any} - {d} bytes\n", .{ @typeName(Type), @sizeOf(Type) })) catch {}; if (comptime std.meta.trait.isContainer(Type) and @hasDecl(Type, "deinit")) node.data.deinit(); node.allocator.destroy(node); return; diff --git a/src/renamer.zig b/src/renamer.zig index 3c67e1f5649d66..5f6e92719c2cee 100644 --- a/src/renamer.zig +++ b/src/renamer.zig @@ -34,7 +34,7 @@ pub const Renamer = struct { if (renamer.symbols.getConst(resolved)) |symbol| { return symbol.original_name; } else { - Global.panic("Invalid symbol {s} in {s}", .{ ref, renamer.source.path.text }); + Global.panic("Invalid symbol {any} in {any}", .{ ref, renamer.source.path.text }); } } }; diff --git a/src/report.zig b/src/report.zig index f1ded35984d2b1..0a47a9e9848ee5 100644 --- a/src/report.zig +++ b/src/report.zig @@ -29,7 +29,7 @@ pub const CrashReportWriter = struct { pub fn printFrame(_: ?*anyopaque, frame: CrashReporter.StackFrame) void { const function_name = if (frame.function_name.len > 0) frame.function_name else "[function ?]"; const filename = if (frame.filename.len > 0) frame.function_name else "[file ?]"; - crash_report_writer.print("[0x{X}] - {s} {s}:{d}\n", .{ frame.pc, function_name, filename, frame.line_number }); + crash_report_writer.print("[0x{X}] - {any} {any}:{d}\n", .{ frame.pc, 
function_name, filename, frame.line_number }); } pub fn dump() void { @@ -68,7 +68,7 @@ pub const CrashReportWriter = struct { } const file_path = std.fmt.bufPrintZ( &crash_reporter_path, - "{s}/.bun-crash/v{s}-{d}.crash", + "{any}/.bun-crash/v{any}-{d}.crash", .{ base_dir, Global.package_json_version, @intCast(u64, @max(std.time.milliTimestamp(), 0)) }, ) catch return; @@ -93,9 +93,9 @@ pub const CrashReportWriter = struct { } if (tilda) { - Output.prettyError("\nCrash report saved to:\n ~{s}\n", .{display_path}); + Output.prettyError("\nCrash report saved to:\n ~{any}\n", .{display_path}); } else { - Output.prettyError("\nCrash report saved to:\n {s}\n", .{display_path}); + Output.prettyError("\nCrash report saved to:\n {any}\n", .{display_path}); } } } @@ -118,8 +118,8 @@ pub fn printMetadata() void { crash_report_writer.print( \\ \\----- bun meta ----- - ++ "\nBun v" ++ Global.package_json_version_with_sha ++ " " ++ platform ++ " " ++ arch ++ " {s}\n" ++ - \\{s}: {} + ++ "\nBun v" ++ Global.package_json_version_with_sha ++ " " ++ platform ++ " " ++ arch ++ " {any}\n" ++ + \\{any}: {} \\ , .{ analytics_platform.version, @@ -178,12 +178,12 @@ pub fn fatal(err_: ?anyerror, msg_: ?string) void { if (err_) |err| { if (Output.isEmojiEnabled()) { crash_report_writer.print( - "\nerror: {s}\n", + "\nerror: {any}\n", .{@errorName(err)}, ); } else { crash_report_writer.print( - "\nerror: {s}\n\n", + "\nerror: {any}\n\n", .{@errorName(err)}, ); } @@ -197,12 +197,12 @@ pub fn fatal(err_: ?anyerror, msg_: ?string) void { if (len > 0) { if (Output.isEmojiEnabled()) { crash_report_writer.print( - "\nuh-oh: {s}\n", + "\nuh-oh: {any}\n", .{msg[0..len]}, ); } else { crash_report_writer.print( - "\nan uh-oh: {s}\n\n", + "\nan uh-oh: {any}\n\n", .{msg[0..len]}, ); } @@ -250,7 +250,7 @@ var globalError_ranOnce = false; export fn Bun__crashReportWrite(ctx: *CrashReportWriter, bytes_ptr: [*]const u8, len: usize) void { if (len > 0) - ctx.print("{s}\n", .{bytes_ptr[0..len]}); + ctx.print("{any}\n", .{bytes_ptr[0..len]}); } extern "C" fn Bun__crashReportDumpStackTrace(ctx: *anyopaque) void; @@ -270,7 +270,7 @@ pub noinline fn handleCrash(signal: i32, addr: usize) void { }; crash_report_writer.print( - "\n{s} at 0x{any}\n\n", + "\n{any} at 0x{any}\n\n", .{ @errorName(name), std.fmt.fmtSliceHexUpper(std.mem.asBytes(&addr)) }, ); printMetadata(); @@ -385,8 +385,8 @@ pub noinline fn globalError(err: anyerror) noreturn { \\ \\If that still doesn't work, you may need to add these lines to /etc/security/limits.conf: \\ - \\ {s} soft nofile 2147483646 - \\ {s} hard nofile 2147483646 + \\ {any} soft nofile 2147483646 + \\ {any} hard nofile 2147483646 \\ , .{ user, user }, @@ -450,8 +450,8 @@ pub noinline fn globalError(err: anyerror) noreturn { \\ \\If that still doesn't work, you may need to add these lines to /etc/security/limits.conf: \\ - \\ {s} soft nofile 2147483646 - \\ {s} hard nofile 2147483646 + \\ {any} soft nofile 2147483646 + \\ {any} hard nofile 2147483646 \\ , .{ user, user }, @@ -490,8 +490,8 @@ pub noinline fn globalError(err: anyerror) noreturn { \\ \\If that still doesn't work, you may need to add these lines to /etc/security/limits.conf: \\ - \\ {s} soft nofile 2147483646 - \\ {s} hard nofile 2147483646 + \\ {any} soft nofile 2147483646 + \\ {any} hard nofile 2147483646 \\ , .{ diff --git a/src/router.zig b/src/router.zig index acc9b8be343bfc..cb39fa07971c9a 100644 --- a/src/router.zig +++ b/src/router.zig @@ -256,7 +256,7 @@ const RouteLoader = struct { &source, Logger.Loc.Empty, this.allocator, - 
"Route \"{s}\" is already defined by {s}", + "Route \"{any}\" is already defined by {any}", .{ route.name, entry.value_ptr.*.abs_path.slice() }, ) catch unreachable; return; @@ -280,7 +280,7 @@ const RouteLoader = struct { &source, Logger.Loc.Empty, this.allocator, - "Route \"{s}\" is already defined by {s}", + "Route \"{any}\" is already defined by {any}", .{ route.name, static_entry.value_ptr.*.abs_path.slice() }, ) catch unreachable; @@ -304,7 +304,7 @@ const RouteLoader = struct { &source, Logger.Loc.Empty, this.allocator, - "Route \"{s}\" is already defined by {s}", + "Route \"{any}\" is already defined by {any}", .{ route.name, entry.value_ptr.* }, ) catch unreachable; return; @@ -719,7 +719,7 @@ pub const Route = struct { route_file_buf[abs_path_str.len] = 0; var buf = route_file_buf[0..abs_path_str.len :0]; file = std.fs.openFileAbsoluteZ(buf, .{ .mode = .read_only }) catch |err| { - log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{s} opening route: {s}", .{ @errorName(err), abs_path_str }) catch unreachable; + log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{any} opening route: {any}", .{ @errorName(err), abs_path_str }) catch unreachable; return null; }; FileSystem.setMaxFd(file.handle); @@ -729,7 +729,7 @@ pub const Route = struct { } var _abs = std.os.getFdPath(file.handle, &route_file_buf) catch |err| { - log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{s} resolving route: {s}", .{ @errorName(err), abs_path_str }) catch unreachable; + log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{any} resolving route: {any}", .{ @errorName(err), abs_path_str }) catch unreachable; return null; }; @@ -1498,7 +1498,7 @@ test "Pattern Match" { const entries = part.@"1"; fail: { if (!Pattern.match(pathname, pattern, pattern, default_allocator, *ParamListType, ¶meters, true)) { - Output.prettyErrorln("Expected pattern \"{s}\" to match \"{s}\"", .{ pattern, pathname }); + Output.prettyErrorln("Expected pattern \"{any}\" to match \"{any}\"", .{ pattern, pathname }); failures += 1; break :fail; } @@ -1507,19 +1507,19 @@ test "Pattern Match" { for (parameters.items(.name)) |entry_name, i| { if (!strings.eql(entry_name, entries[i].name)) { failures += 1; - Output.prettyErrorln("{s} -- Expected name \"{s}\" but received \"{s}\" for path {s}", .{ pattern, entries[i].name, parameters.get(i).name, pathname }); + Output.prettyErrorln("{any} -- Expected name \"{any}\" but received \"{any}\" for path {any}", .{ pattern, entries[i].name, parameters.get(i).name, pathname }); break :fail; } if (!strings.eql(parameters.get(i).value, entries[i].value)) { failures += 1; - Output.prettyErrorln("{s} -- Expected value \"{s}\" but received \"{s}\" for path {s}", .{ pattern, entries[i].value, parameters.get(i).value, pathname }); + Output.prettyErrorln("{any} -- Expected value \"{any}\" but received \"{any}\" for path {any}", .{ pattern, entries[i].value, parameters.get(i).value, pathname }); break :fail; } } } if (parameters.len != entries.len) { - Output.prettyErrorln("Expected parameter count for \"{s}\" to match \"{s}\"", .{ pattern, pathname }); + Output.prettyErrorln("Expected parameter count for \"{any}\" to match \"{any}\"", .{ pattern, pathname }); failures += 1; break :fail; } diff --git a/src/runtime.zig b/src/runtime.zig index 87daae829b3656..146387a6d9b768 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -284,7 +284,7 @@ pub const Runtime = struct { return version_hash; } - const bytecodeCacheFilename = std.fmt.comptimePrint("__runtime.{s}", .{version_hash}); + const 
bytecodeCacheFilename = std.fmt.comptimePrint("__runtime.{any}", .{version_hash}); var bytecodeCacheFetcher = Fs.BytecodeCacheFetcher{}; pub fn byteCodeCacheFile(fs: *Fs.FileSystem.RealFS) ?bun.StoredFileDescriptorType { diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 2978d94f31852f..2c4f174e31ef5f 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -776,7 +776,7 @@ pub fn eqlLong(a_: string, b: string, comptime check_len: bool) bool { } pub inline fn append(allocator: std.mem.Allocator, self: string, other: string) ![]u8 { - return std.fmt.allocPrint(allocator, "{s}{s}", .{ self, other }); + return std.fmt.allocPrint(allocator, "{any}{any}", .{ self, other }); } pub inline fn joinBuf(out: []u8, parts: anytype, comptime parts_len: usize) []u8 { diff --git a/src/test/tester.zig b/src/test/tester.zig index 2c53acc81aa920..1be21111c970ed 100644 --- a/src/test/tester.zig +++ b/src/test/tester.zig @@ -42,7 +42,7 @@ pub const Tester = struct { stderr.writeAll(RESET) catch unreachable; stderr.writeAll(pad) catch unreachable; stderr.writeAll(DIM) catch unreachable; - std.fmt.format(stderr.writer(), "{s}:{d}:{d}", .{ self.source.file, self.source.line, self.source.column }) catch unreachable; + std.fmt.format(stderr.writer(), "{any}:{d}:{d}", .{ self.source.file, self.source.line, self.source.column }) catch unreachable; stderr.writeAll(RESET) catch unreachable; stderr.writeAll("\n") catch unreachable; @@ -50,7 +50,7 @@ pub const Tester = struct { stderr.writeAll("Expected: ") catch unreachable; stderr.writeAll(RESET) catch unreachable; stderr.writeAll(GREEN) catch unreachable; - std.fmt.format(stderr.writer(), "\"{s}\"", .{self.expected}) catch unreachable; + std.fmt.format(stderr.writer(), "\"{any}\"", .{self.expected}) catch unreachable; stderr.writeAll(GREEN) catch unreachable; stderr.writeAll(RESET) catch unreachable; @@ -59,7 +59,7 @@ pub const Tester = struct { stderr.writeAll("Received: ") catch unreachable; stderr.writeAll(RESET) catch unreachable; stderr.writeAll(RED) catch unreachable; - std.fmt.format(stderr.writer(), "\"{s}\"", .{self.result}) catch unreachable; + std.fmt.format(stderr.writer(), "\"{any}\"", .{self.result}) catch unreachable; stderr.writeAll(RED) catch unreachable; stderr.writeAll(RESET) catch unreachable; stderr.writeAll("\n") catch unreachable; @@ -129,17 +129,17 @@ pub const Tester = struct { std.log.info("No expectations.\n\n", .{}); }, .pass => { - std.fmt.format(stderr.writer(), "{s}All {d} expectations passed.{s}\n", .{ GREEN, tester.pass.items.len, GREEN }) catch unreachable; + std.fmt.format(stderr.writer(), "{any}All {d} expectations passed.{any}\n", .{ GREEN, tester.pass.items.len, GREEN }) catch unreachable; std.fmt.format(stderr.writer(), RESET, .{}) catch unreachable; std.testing.expect(true) catch std.debug.panic("Test failure", .{}); }, .fail => { - std.fmt.format(stderr.writer(), "{s}All {d} expectations failed.{s}\n\n", .{ RED, tester.fail.items.len, RED }) catch unreachable; + std.fmt.format(stderr.writer(), "{any}All {d} expectations failed.{any}\n\n", .{ RED, tester.fail.items.len, RED }) catch unreachable; std.fmt.format(stderr.writer(), RESET, .{}) catch unreachable; std.testing.expect(false) catch std.debug.panic("Test failure", .{}); }, .some_fail => { - std.fmt.format(stderr.writer(), "{s}{d} failed{s} and {s}{d} passed{s} of {d} expectations{s}\n\n", .{ + std.fmt.format(stderr.writer(), "{any}{d} failed{any} and {any}{d} passed{any} of {d} expectations{any}\n\n", .{ RED, tester.fail.items.len, RED ++ 
RESET, @@ -150,7 +150,7 @@ pub const Tester = struct { RESET, }) catch unreachable; std.fmt.format(stderr.writer(), RESET, .{}) catch unreachable; - std.testing.expect(false) catch std.debug.panic("Test failure in {s}: {s}:{d}:{d}", .{ src.fn_name, src.file, src.line, src.column }); + std.testing.expect(false) catch std.debug.panic("Test failure in {any}: {any}:{d}:{d}", .{ src.fn_name, src.file, src.line, src.column }); }, } } diff --git a/src/toml/toml_lexer.zig b/src/toml/toml_lexer.zig index 2ec11c72bf63fa..585d1c91368dee 100644 --- a/src/toml/toml_lexer.zig +++ b/src/toml/toml_lexer.zig @@ -95,7 +95,7 @@ pub const Lexer = struct { pub fn addDefaultError(self: *Lexer, msg: []const u8) !void { @setCold(true); - self.addError(self.start, "{s}", .{msg}, true); + self.addError(self.start, "{any}", .{msg}, true); return Error.SyntaxError; } @@ -325,7 +325,7 @@ pub const Lexer = struct { if (std.fmt.parseFloat(f64, text)) |num| { lexer.number = num; } else |_| { - try lexer.addSyntaxError(lexer.start, "Invalid number {s}", .{text}); + try lexer.addSyntaxError(lexer.start, "Invalid number {any}", .{text}); } } } @@ -1124,7 +1124,7 @@ pub const Lexer = struct { } }; - try lexer.addRangeError(lexer.range(), "Unexpected {s}", .{found}, true); + try lexer.addRangeError(lexer.range(), "Unexpected {any}", .{found}, true); } pub fn expectedString(self: *Lexer, text: string) !void { @@ -1136,7 +1136,7 @@ pub const Lexer = struct { } }; - try self.addRangeError(self.range(), "Expected {s} but found {s}", .{ text, found }, true); + try self.addRangeError(self.range(), "Expected {any} but found {any}", .{ text, found }, true); } pub fn range(self: *Lexer) logger.Range { diff --git a/src/url.zig b/src/url.zig index 56cc8182a543f6..a99fa825d3235f 100644 --- a/src/url.zig +++ b/src/url.zig @@ -189,19 +189,19 @@ pub const URL = struct { var out: [2048]u8 = undefined; const normalized_path = joinNormalize(&out, prefix, dirname, basename, extname); - try writer.print("{s}/{s}", .{ this.origin, normalized_path }); + try writer.print("{any}/{any}", .{ this.origin, normalized_path }); } pub fn joinAlloc(this: *const URL, allocator: std.mem.Allocator, prefix: string, dirname: string, basename: string, extname: string, absolute_path: string) !string { const has_uplevels = std.mem.indexOf(u8, dirname, "../") != null; if (has_uplevels) { - return try std.fmt.allocPrint(allocator, "{s}/abs:{s}", .{ this.origin, absolute_path }); + return try std.fmt.allocPrint(allocator, "{any}/abs:{any}", .{ this.origin, absolute_path }); } else { var out: [2048]u8 = undefined; const normalized_path = joinNormalize(&out, prefix, dirname, basename, extname); - return try std.fmt.allocPrint(allocator, "{s}/{s}", .{ this.origin, normalized_path }); + return try std.fmt.allocPrint(allocator, "{any}/{any}", .{ this.origin, normalized_path }); } } diff --git a/src/watcher.zig b/src/watcher.zig index e627e4b5196991..de6d7384af6259 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -350,7 +350,7 @@ pub fn NewWatcher(comptime ContextType: type) type { if (FeatureFlags.verbose_watcher) Output.prettyln("Watcher started", .{}); this._watchLoop() catch |err| { - Output.prettyErrorln("Watcher crashed: {s}", .{@errorName(err)}); + Output.prettyErrorln("Watcher crashed: {any}", .{@errorName(err)}); this.watchloop_handle = null; PlatformWatcher.stop(); @@ -796,9 +796,9 @@ pub fn NewWatcher(comptime ContextType: type) type { if (comptime FeatureFlags.verbose_watcher) { if (strings.indexOf(file_path, this.cwd)) |i| { - Output.prettyln("Added ./{s} 
to watch list.", .{file_path[i + this.cwd.len ..]}); + Output.prettyln("Added ./{any} to watch list.", .{file_path[i + this.cwd.len ..]}); } else { - Output.prettyln("Added {s} to watch list.", .{file_path}); + Output.prettyln("Added {any} to watch list.", .{file_path}); } } } From f25d48cb224338be9a17996bca181e71cdf99dbf Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 04:27:58 +0100 Subject: [PATCH 39/51] Fix: fmt fixing --- src/bun.js/webcore/streams.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 6d18c71d5ae6bb..6d4fcb6bf3c233 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -2013,7 +2013,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { fn send(this: *@This(), buf: []const u8) bool { std.debug.assert(!this.done); - defer log("send: {d} bytes (backpressure: {d})", .{ buf.len, this.has_backpressure }); + defer log("send: {d} bytes (backpressure: {any})", .{ buf.len, this.has_backpressure }); if (this.requested_end and !this.res.state().isHttpWriteCalled()) { const success = this.res.tryEnd(buf, this.end_len, false); @@ -2357,7 +2357,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { // In this case, it's always an error pub fn end(this: *@This(), err: ?Syscall.Error) JSC.Node.Maybe(void) { - log("end({?s})", .{err}); + log("end({any})", .{err}); if (this.requested_end) { return .{ .result = {} }; From 7f2fa8c05766d6bc655c665c3ee7cc813df2bb8d Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 04:35:25 +0100 Subject: [PATCH 40/51] Fix: makeOpenPath -> makeOpenPathIterable --- src/bun.js/module_loader.zig | 4 +-- src/cli/create_command.zig | 2 +- src/cli/upgrade_command.zig | 2 +- src/install/extract_tarball.zig | 4 +-- src/install/install.zig | 48 ++++++++++++++++----------------- src/resolver/resolver.zig | 3 +-- src/router.zig | 4 +-- 7 files changed, 33 insertions(+), 34 deletions(-) diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 6d834f489e039e..f100fd6e8be440 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -156,11 +156,11 @@ fn dumpSource(specifier: string, printer: anytype) !void { pub var dir: ?std.fs.Dir = null; }; if (BunDebugHolder.dir == null) { - BunDebugHolder.dir = try std.fs.cwd().makeOpenPath("/tmp/bun-debug-src/", .{ .iterate = true }); + BunDebugHolder.dir = try std.fs.cwd().makeOpenPathIterable("/tmp/bun-debug-src/", .{}); } if (std.fs.path.dirname(specifier)) |dir_path| { - var parent = try BunDebugHolder.dir.?.makeOpenPath(dir_path[1..], .{ .iterate = true }); + var parent = try BunDebugHolder.dir.?.makeOpenPathIterable(dir_path[1..], .{}); defer parent.close(); try parent.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten()); } else { diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 8d8d79a8a3214b..c42cf57a7ffdd2 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -573,7 +573,7 @@ pub const CreateCommand = struct { }; std.fs.deleteTreeAbsolute(destination) catch {}; - const destination_dir = std.fs.cwd().makeOpenPath(destination, .{ .iterate = true }) catch |err| { + const destination_dir = std.fs.cwd().makeOpenPathIterable(destination, .{}) catch |err| { node.end(); progress.refresh(); diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index c719a96ded6b97..ab9459385fc1da 100644 --- a/src/cli/upgrade_command.zig +++ 
b/src/cli/upgrade_command.zig @@ -511,7 +511,7 @@ pub const UpgradeCommand = struct { const version_name = version.name().?; var save_dir_ = filesystem.tmpdir(); - var save_dir = save_dir_.makeOpenPath(version_name, .{ .iterate = true }) catch { + var save_dir = save_dir_.makeOpenPathIterable(version_name, .{}) catch { Output.prettyErrorln("error: Failed to open temporary directory", .{}); Global.exit(1); }; diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 5a790beadce463..cd927956c8727b 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -158,7 +158,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string { var tmpname = try FileSystem.instance.tmpname(basename[0..@min(basename.len, 32)], &tmpname_buf, tgz_bytes.len); { - var extract_destination = tmpdir.makeOpenPath(std.mem.span(tmpname), .{ .iterate = true }) catch |err| { + var extract_destination = tmpdir.makeOpenPathIterable(std.mem.span(tmpname), .{}) catch |err| { Output.panic("err: {s} when create temporary directory named {s} (while extracting {s})", .{ @errorName(err), tmpname, name }); }; @@ -277,7 +277,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string { // create an index storing each version of a package installed create_index: { - var index_dir = cache_dir.makeOpenPath(name, .{ .iterate = true }) catch break :create_index; + var index_dir = cache_dir.makeOpenPathIterable(name, .{}) catch break :create_index; defer index_dir.close(); index_dir.symLink( final_path, diff --git a/src/install/install.zig b/src/install/install.zig index 6c5e0a1a4099fd..0f086046357804 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -949,7 +949,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{ .iterate = true }) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1069,7 +1069,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{ .iterate = true }) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1126,7 +1126,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{ .iterate = true }) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1221,7 +1221,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{ .iterate = true }) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1655,7 +1655,7 @@ pub const PackageManager = struct { return this.global_link_dir orelse brk: { var global_dir = try Options.openGlobalDir(this.options.explicit_global_directory); this.global_dir = global_dir; - this.global_link_dir = try 
global_dir.makeOpenPath("node_modules", .{ .iterate = true }); + this.global_link_dir = try global_dir.makeOpenPathIterable("node_modules", .{}); var buf: [bun.MAX_PATH_BYTES]u8 = undefined; const _path = try std.os.getFdPath(this.global_link_dir.?.fd, &buf); this.global_link_dir_path = try Fs.FileSystem.DirnameStore.instance.append([]const u8, _path); @@ -1765,13 +1765,13 @@ pub const PackageManager = struct { loop: while (true) { if (this.options.enable.cache) { const cache_dir = fetchCacheDirectoryPath(this.env_loader); - return std.fs.cwd().makeOpenPath(cache_dir.path, .{ .iterate = true }) catch { + return std.fs.cwd().makeOpenPathIterable(cache_dir.path, .{}) catch { this.options.enable.cache = false; continue :loop; }; } - return std.fs.cwd().makeOpenPath("node_modules/.cache", .{ .iterate = true }) catch |err| { + return std.fs.cwd().makeOpenPathIterable("node_modules/.cache", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to write files: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1790,9 +1790,9 @@ pub const PackageManager = struct { // This makes renameat() work const default_tempdir = Fs.FileSystem.RealFS.getDefaultTempDir(); var tried_dot_tmp = false; - var tempdir: std.fs.Dir = std.fs.cwd().makeOpenPath(default_tempdir, .{ .iterate = true }) catch brk: { + var tempdir: std.fs.Dir = std.fs.cwd().makeOpenPathIterable(default_tempdir, .{}) catch brk: { tried_dot_tmp = true; - break :brk cache_directory.makeOpenPath(".tmp", .{ .iterate = true }) catch |err| { + break :brk cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1805,7 +1805,7 @@ pub const PackageManager = struct { if (!tried_dot_tmp) { tried_dot_tmp = true; - tempdir = cache_directory.makeOpenPath(".tmp", .{ .iterate = true }) catch |err| { + tempdir = cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1820,7 +1820,7 @@ pub const PackageManager = struct { std.os.renameatZ(tempdir.fd, tmpname, cache_directory.fd, tmpname) catch |err| { if (!tried_dot_tmp) { tried_dot_tmp = true; - tempdir = cache_directory.makeOpenPath(".tmp", .{ .iterate = true }) catch |err2| { + tempdir = cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err2| { Output.prettyErrorln("error: bun is unable to write files to tempdir: {s}", .{@errorName(err2)}); Global.crash(); }; @@ -1978,7 +1978,7 @@ pub const PackageManager = struct { pub fn getInstalledVersionsFromDiskCache(this: *PackageManager, tags_buf: *std.ArrayList(u8), package_name: []const u8, allocator: std.mem.Allocator) !std.ArrayList(Semver.Version) { var list = std.ArrayList(Semver.Version).init(allocator); - var dir = this.getCacheDirectory().openDir(package_name, .{ .iterate = true }) catch |err| { + var dir = this.getCacheDirectory().openDir(package_name, .{}) catch |err| { switch (err) { error.FileNotFound, error.NotDir, error.AccessDenied, error.DeviceBusy => { return list; @@ -3533,25 +3533,25 @@ pub const PackageManager = struct { pub fn openGlobalDir(explicit_global_dir: string) !std.fs.Dir { if (std.os.getenvZ("BUN_INSTALL_GLOBAL_DIR")) |home_dir| { - return try std.fs.cwd().makeOpenPath(home_dir, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); } if (explicit_global_dir.len > 0) { - return try std.fs.cwd().makeOpenPath(explicit_global_dir, .{ .iterate = true }); + 
return try std.fs.cwd().makeOpenPathIterable(explicit_global_dir, .{}); } if (std.os.getenvZ("BUN_INSTALL")) |home_dir| { var buf: [bun.MAX_PATH_BYTES]u8 = undefined; var parts = [_]string{ "install", "global" }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPath(path, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(path, .{}); } if (std.os.getenvZ("XDG_CACHE_HOME") orelse std.os.getenvZ("HOME")) |home_dir| { var buf: [bun.MAX_PATH_BYTES]u8 = undefined; var parts = [_]string{ ".bun", "install", "global" }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPath(path, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(path, .{}); } return error.@"No global directory found"; @@ -3559,13 +3559,13 @@ pub const PackageManager = struct { pub fn openGlobalBinDir(opts_: ?*const Api.BunInstall) !std.fs.Dir { if (std.os.getenvZ("BUN_INSTALL_BIN")) |home_dir| { - return try std.fs.cwd().makeOpenPath(home_dir, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); } if (opts_) |opts| { if (opts.global_bin_dir) |home_dir| { if (home_dir.len > 0) { - return try std.fs.cwd().makeOpenPath(home_dir, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); } } } @@ -3576,7 +3576,7 @@ pub const PackageManager = struct { "bin", }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPath(path, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(path, .{}); } if (std.os.getenvZ("XDG_CACHE_HOME") orelse std.os.getenvZ("HOME")) |home_dir| { @@ -3586,7 +3586,7 @@ pub const PackageManager = struct { "bin", }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPath(path, .{ .iterate = true }); + return try std.fs.cwd().makeOpenPathIterable(path, .{}); } return error.@"Missing global bin directory: try setting $BUN_INSTALL"; @@ -4546,7 +4546,7 @@ pub const PackageManager = struct { try manager.setupGlobalDir(&ctx); - break :brk manager.global_dir.?.makeOpenPath("node_modules", .{ .iterate = true }) catch |err| { + break :brk manager.global_dir.?.makeOpenPathIterable("node_modules", .{}) catch |err| { if (manager.options.log_level != .silent) Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); Global.crash(); @@ -4709,7 +4709,7 @@ pub const PackageManager = struct { try manager.setupGlobalDir(&ctx); - break :brk manager.global_dir.?.makeOpenPath("node_modules", .{ .iterate = true }) catch |err| { + break :brk manager.global_dir.?.makeOpenPathIterable("node_modules", .{}) catch |err| { if (manager.options.log_level != .silent) Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); Global.crash(); @@ -5910,13 +5910,13 @@ pub const PackageManager = struct { // we want to check lazily though // no need to download packages you've already installed!! 
var skip_verify_installed_version_number = false; - var node_modules_folder = std.fs.cwd().openDirZ("node_modules", .{ .iterate = true }) catch brk: { + var node_modules_folder = std.fs.cwd().openDirZ("node_modules", .{}) catch brk: { skip_verify_installed_version_number = true; std.fs.cwd().makeDirZ("node_modules") catch |err| { Output.prettyErrorln("error: {s} creating node_modules folder", .{@errorName(err)}); Global.crash(); }; - break :brk std.fs.cwd().openDirZ("node_modules", .{ .iterate = true }) catch |err| { + break :brk std.fs.cwd().openDirZ("node_modules", .{}) catch |err| { Output.prettyErrorln("error: {s} opening node_modules folder", .{@errorName(err)}); Global.crash(); }; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 9fad998cb2f3a9..e5c873160de865 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -2248,10 +2248,9 @@ pub const Resolver = struct { path.ptr[queue_top.unsafe_path.len] = 0; defer path.ptr[queue_top.unsafe_path.len] = prev_char; var sentinel = path.ptr[0..queue_top.unsafe_path.len :0]; - _open_dir = std.fs.openDirAbsoluteZ( + _open_dir = std.fs.openIterableDirAbsoluteZ( sentinel, .{ - .iterate = true, .no_follow = !follow_symlinks, }, ); diff --git a/src/router.zig b/src/router.zig index cb39fa07971c9a..21d6deaa356cbf 100644 --- a/src/router.zig +++ b/src/router.zig @@ -880,10 +880,10 @@ pub const MockServer = struct { fn makeTest(cwd_path: string, data: anytype) !void { Output.initTest(); std.debug.assert(cwd_path.len > 1 and !strings.eql(cwd_path, "/") and !strings.endsWith(cwd_path, "bun")); - const bun_tests_dir = try std.fs.cwd().makeOpenPath("bun-test-scratch", .{ .iterate = true }); + const bun_tests_dir = try std.fs.cwd().makeOpenPathIterable("bun-test-scratch", .{}); bun_tests_dir.deleteTree(cwd_path) catch {}; - const cwd = try bun_tests_dir.makeOpenPath(cwd_path, .{ .iterate = true }); + const cwd = try bun_tests_dir.makeOpenPathIterable(cwd_path, .{}); try cwd.setAsCwd(); const Data = @TypeOf(data); From aec0b18231f442e2cdfa4a7cb05cadb0103d3645 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Sat, 12 Nov 2022 05:08:43 +0100 Subject: [PATCH 41/51] Fix: IterableDir --- src/bun.js/bindings/header-gen.zig | 2 +- src/bun.js/module_loader.zig | 4 +-- src/bun.js/node/dir_iterator.zig | 2 +- src/bundler.zig | 24 +++++++++------- src/cache.zig | 2 ++ src/cli/create_command.zig | 14 ++++----- src/cli/install_completions_command.zig | 38 ++++++++++++------------- src/cli/test_command.zig | 2 +- src/cli/upgrade_command.zig | 2 +- src/fs.zig | 12 ++++---- src/install/bin.zig | 6 ++-- src/install/install.zig | 24 ++++++++-------- src/libarchive/libarchive.zig | 8 +++--- src/options.zig | 8 +++--- src/resolver/resolver.zig | 2 +- src/walker_skippable.zig | 4 +-- src/watcher.zig | 2 +- 17 files changed, 80 insertions(+), 76 deletions(-) diff --git a/src/bun.js/bindings/header-gen.zig b/src/bun.js/bindings/header-gen.zig index d0b24c8ce33675..d9b1b9f394a45e 100644 --- a/src/bun.js/bindings/header-gen.zig +++ b/src/bun.js/bindings/header-gen.zig @@ -1,5 +1,5 @@ const std = @import("std"); -const Dir = std.fs.Dir; +const Dir = std.fs.IterableDir; const FnMeta = std.builtin.Type.Fn; const FnDecl = std.builtin.Type.Declaration.Data.FnDecl; const StructMeta = std.builtin.Type.Struct; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index f100fd6e8be440..a13aebac102c4f 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -156,11 +156,11 @@ fn dumpSource(specifier: 
string, printer: anytype) !void { pub var dir: ?std.fs.Dir = null; }; if (BunDebugHolder.dir == null) { - BunDebugHolder.dir = try std.fs.cwd().makeOpenPathIterable("/tmp/bun-debug-src/", .{}); + BunDebugHolder.dir = try std.fs.cwd().makeOpenPath("/tmp/bun-debug-src/", .{}); } if (std.fs.path.dirname(specifier)) |dir_path| { - var parent = try BunDebugHolder.dir.?.makeOpenPathIterable(dir_path[1..], .{}); + var parent = try BunDebugHolder.dir.?.makeOpenPath(dir_path[1..], .{}); defer parent.close(); try parent.writeFile(std.fs.path.basename(specifier), printer.ctx.getWritten()); } else { diff --git a/src/bun.js/node/dir_iterator.zig b/src/bun.js/node/dir_iterator.zig index 19db4177db0257..3af04a3db8fe1a 100644 --- a/src/bun.js/node/dir_iterator.zig +++ b/src/bun.js/node/dir_iterator.zig @@ -8,7 +8,7 @@ const builtin = @import("builtin"); const std = @import("std"); const os = std.os; -const Dir = std.fs.Dir; +const Dir = std.fs.IterableDir; const JSC = @import("../../jsc.zig"); const PathString = JSC.PathString; diff --git a/src/bundler.zig b/src/bundler.zig index 6fa9b06695f1ac..f9ff22a38b90d9 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -56,6 +56,8 @@ const TOML = @import("./toml/toml_parser.zig").TOML; const JSC = @import("javascript_core"); const PackageManager = @import("./install/install.zig").PackageManager; +const Dir = std.fs.IterableDir; + pub fn MacroJSValueType_() type { if (comptime JSC.is_bindgen) { return struct { @@ -962,7 +964,7 @@ pub const Bundler = struct { var file: std.fs.File = undefined; - if (Outstream == std.fs.Dir) { + if (Outstream == Dir) { const output_dir = outstream; if (std.fs.path.dirname(file_path.pretty)) |dirname| { @@ -1030,7 +1032,7 @@ pub const Bundler = struct { file_op.is_tmpdir = false; - if (Outstream == std.fs.Dir) { + if (Outstream == Dir) { file_op.dir = outstream.fd; if (bundler.fs.fs.needToCloseFiles()) { @@ -1088,7 +1090,7 @@ pub const Bundler = struct { file_op.is_tmpdir = false; - if (Outstream == std.fs.Dir) { + if (Outstream == Dir) { file_op.dir = outstream.fd; if (bundler.fs.fs.needToCloseFiles()) { @@ -1775,10 +1777,10 @@ pub const Bundler = struct { if (framework.client.isEnabled()) { did_start = true; try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, true, std.fs.Dir, output_dir), - .absolute_url => bundler.processResolveQueue(.absolute_url, true, std.fs.Dir, output_dir), - .absolute_path => bundler.processResolveQueue(.absolute_path, true, std.fs.Dir, output_dir), - .package_path => bundler.processResolveQueue(.package_path, true, std.fs.Dir, output_dir), + .relative => bundler.processResolveQueue(.relative, true, Dir, output_dir), + .absolute_url => bundler.processResolveQueue(.absolute_url, true, Dir, output_dir), + .absolute_path => bundler.processResolveQueue(.absolute_path, true, Dir, output_dir), + .package_path => bundler.processResolveQueue(.package_path, true, Dir, output_dir), }; } } @@ -1786,10 +1788,10 @@ pub const Bundler = struct { if (!did_start) { try switch (bundler.options.import_path_format) { - .relative => bundler.processResolveQueue(.relative, false, std.fs.Dir, output_dir), - .absolute_url => bundler.processResolveQueue(.absolute_url, false, std.fs.Dir, output_dir), - .absolute_path => bundler.processResolveQueue(.absolute_path, false, std.fs.Dir, output_dir), - .package_path => bundler.processResolveQueue(.package_path, false, std.fs.Dir, output_dir), + .relative => bundler.processResolveQueue(.relative, false, Dir, output_dir), + .absolute_url 
=> bundler.processResolveQueue(.absolute_url, false, Dir, output_dir), + .absolute_path => bundler.processResolveQueue(.absolute_path, false, Dir, output_dir), + .package_path => bundler.processResolveQueue(.package_path, false, Dir, output_dir), }; } } diff --git a/src/cache.zig b/src/cache.zig index a89a597f4530a8..3ee2138d4ee2ea 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -26,6 +26,8 @@ const import_record = @import("./import_record.zig"); const ImportRecord = import_record.ImportRecord; +const Dir = std.fs.IterableDir; + pub const FsCacheEntry = struct { contents: string, fd: StoredFileDescriptorType = 0, diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index c42cf57a7ffdd2..43494c89342e89 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -564,7 +564,7 @@ pub const CreateCommand = struct { node.name = "Copying files"; progress.refresh(); - const template_dir = std.fs.openDirAbsolute(filesystem.abs(&template_parts), .{ .iterate = true }) catch |err| { + const template_dir = std.fs.openIterableDirAbsolute(filesystem.abs(&template_parts), .{}) catch |err| { node.end(); progress.refresh(); @@ -1721,33 +1721,33 @@ pub const Example = struct { var examples = std.ArrayList(Example).fromOwnedSlice(ctx.allocator, remote_examples); { - var folders = [3]std.fs.Dir{ std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 } }; + var folders = [3]std.fs.IterableDir{ std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 }, std.fs.Dir{ .fd = 0 } }; if (env_loader.map.get("BUN_CREATE_DIR")) |home_dir| { var parts = [_]string{home_dir}; var outdir_path = filesystem.absBuf(&parts, &home_dir_buf); - folders[0] = std.fs.openDirAbsolute(outdir_path, .{ .iterate = true }) catch std.fs.Dir{ .fd = 0 }; + folders[0] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 }; } { var parts = [_]string{ filesystem.top_level_dir, BUN_CREATE_DIR }; var outdir_path = filesystem.absBuf(&parts, &home_dir_buf); - folders[1] = std.fs.openDirAbsolute(outdir_path, .{ .iterate = true }) catch std.fs.Dir{ .fd = 0 }; + folders[1] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 }; } if (env_loader.map.get("HOME")) |home_dir| { var parts = [_]string{ home_dir, BUN_CREATE_DIR }; var outdir_path = filesystem.absBuf(&parts, &home_dir_buf); - folders[2] = std.fs.openDirAbsolute(outdir_path, .{ .iterate = true }) catch std.fs.Dir{ .fd = 0 }; + folders[2] = std.fs.openIterableDirAbsolute(outdir_path, .{}) catch std.fs.Dir{ .fd = 0 }; } // subfolders with package.json for (folders) |folder_| { if (folder_.fd != 0) { - const folder: std.fs.Dir = folder_; + const folder: std.fs.IterableDir = folder_; var iter = folder.iterate(); loop: while (iter.next() catch null) |entry_| { - const entry: std.fs.Dir.Entry = entry_; + const entry: std.fs.IterableDir.Entry = entry_; switch (entry.kind) { .Directory => { diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index 456f16e2bbb4c2..b24800ef1d1ac6 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -97,8 +97,8 @@ pub const InstallCompletionsCommand = struct { Global.exit(fail_exit_code); } - break :found std.fs.openDirAbsolute(completions_dir, .{ - .iterate = true, + break :found std.fs.openIterableDirAbsolute(completions_dir, .{ + }) catch |err| { Output.prettyErrorln("error: accessing {any} errored {any}", .{ completions_dir, @errorName(err) }); Global.exit(fail_exit_code); @@ -115,7 +115,7 @@ pub 
const InstallCompletionsCommand = struct { outer: { var paths = [_]string{ std.mem.span(config_dir), "./fish/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -125,7 +125,7 @@ pub const InstallCompletionsCommand = struct { var paths = [_]string{ std.mem.span(data_dir), "./fish/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -134,7 +134,7 @@ pub const InstallCompletionsCommand = struct { outer: { var paths = [_]string{ std.mem.span(home_dir), "./.config/fish/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -144,12 +144,12 @@ pub const InstallCompletionsCommand = struct { if (!Environment.isAarch64) { // homebrew fish completions_dir = "/usr/local/share/fish/completions"; - break :found std.fs.openDirAbsoluteZ("/usr/local/share/fish/completions", .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsoluteZ("/usr/local/share/fish/completions", .{}) catch break :outer; } else { // homebrew fish completions_dir = "/opt/homebrew/share/fish/completions"; - break :found std.fs.openDirAbsoluteZ("/opt/homebrew/share/fish/completions", .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsoluteZ("/opt/homebrew/share/fish/completions", .{}) catch break :outer; } } @@ -157,7 +157,7 @@ pub const InstallCompletionsCommand = struct { outer: { completions_dir = "/etc/fish/completions"; - break :found std.fs.openDirAbsoluteZ("/etc/fish/completions", .{ .iterate = true }) catch break :outer; + break :found std.fs.openIterableDirAbsoluteZ("/etc/fish/completions", .{}) catch break :outer; } }, .zsh => { @@ -166,7 +166,7 @@ pub const InstallCompletionsCommand = struct { while (splitter.next()) |dir| { completions_dir = dir; - break :found std.fs.openDirAbsolute(dir, .{ .iterate = true }) catch continue; + break :found std.fs.openIterableDirAbsolute(dir, .{}) catch continue; } } @@ -175,7 +175,7 @@ pub const InstallCompletionsCommand = struct { var paths = [_]string{ std.mem.span(data_dir), "./zsh-completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -183,7 +183,7 @@ pub const InstallCompletionsCommand = struct { if (std.os.getenvZ("BUN_INSTALL")) |home_dir| { outer: { completions_dir = home_dir; - break :found std.fs.openDirAbsolute(home_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(home_dir, .{}) catch break :outer; } } @@ -193,7 +193,7 @@ pub const InstallCompletionsCommand = struct { outer: { var paths = [_]string{ std.mem.span(home_dir), "./.oh-my-zsh/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -202,7 +202,7 @@ 
pub const InstallCompletionsCommand = struct { outer: { var paths = [_]string{ std.mem.span(home_dir), "./.bun" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -217,7 +217,7 @@ pub const InstallCompletionsCommand = struct { for (dirs_to_try) |dir| { completions_dir = dir; - break :found std.fs.openDirAbsolute(dir, .{ .iterate = true }) catch continue; + break :found std.fs.openIterableDirAbsolute(dir, .{}) catch continue; } }, .bash => { @@ -225,7 +225,7 @@ pub const InstallCompletionsCommand = struct { outer: { var paths = [_]string{ std.mem.span(data_dir), "./bash-completion/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -235,7 +235,7 @@ pub const InstallCompletionsCommand = struct { var paths = [_]string{ std.mem.span(config_dir), "./bash-completion/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -246,7 +246,7 @@ pub const InstallCompletionsCommand = struct { var paths = [_]string{ std.mem.span(home_dir), "./.oh-my-bash/custom/completions" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -255,7 +255,7 @@ pub const InstallCompletionsCommand = struct { var paths = [_]string{ std.mem.span(home_dir), "./.bash_completion.d" }; completions_dir = resolve_path.joinAbsString(cwd, &paths, .auto); - break :found std.fs.openDirAbsolute(completions_dir, .{ .iterate = true }) catch + break :found std.fs.openIterableDirAbsolute(completions_dir, .{}) catch break :outer; } } @@ -268,7 +268,7 @@ pub const InstallCompletionsCommand = struct { for (dirs_to_try) |dir| { completions_dir = dir; - break :found std.fs.openDirAbsolute(dir, .{ .iterate = true }) catch continue; + break :found std.fs.openIterableDirAbsolute(dir, .{}) catch continue; } }, else => unreachable, diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 00f2d5802d2ea3..5f11ce8890a414 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -215,7 +215,7 @@ const Scanner = struct { var path2 = this.fs.absBuf(parts2, &this.open_dir_buf); this.open_dir_buf[path2.len] = 0; var pathZ = this.open_dir_buf[path2.len - entry.name.slice().len .. 
path2.len :0]; - var child_dir = dir.openDirZ(pathZ, .{ .iterate = true }) catch continue; + var child_dir = dir.openIterableDirZ(pathZ, .{}) catch continue; path2 = this.fs.dirname_store.append(string, path2) catch unreachable; FileSystem.setMaxFd(child_dir.fd); _ = this.readDirWithName(path2, child_dir) catch continue; diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index ab9459385fc1da..90e3fc3766b5b6 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -637,7 +637,7 @@ pub const UpgradeCommand = struct { // safe because the slash will no longer be in use current_executable_buf[target_dir_.len] = 0; var target_dirname = current_executable_buf[0..target_dir_.len :0]; - var target_dir = std.fs.openDirAbsoluteZ(target_dirname, .{ .iterate = true }) catch |err| { + var target_dir = std.fs.openIterableDirAbsoluteZ(target_dirname, .{}) catch |err| { save_dir_.deleteTree(version_name) catch {}; Output.prettyErrorln("error: Failed to open bun's install directory {any}", .{@errorName(err)}); Global.exit(1); diff --git a/src/fs.zig b/src/fs.zig index 0dbb28a31d98e1..4b9d5c7eb91b47 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -567,7 +567,7 @@ pub const FileSystem = struct { tmpdir_path_set = true; } - return try std.fs.openDirAbsolute(tmpdir_path, .{ .access_sub_paths = true, .iterate = true }); + return try std.fs.openIterableDirAbsolute(tmpdir_path, .{ .access_sub_paths = true }); } pub fn getDefaultTempDir() string { @@ -814,17 +814,17 @@ pub const FileSystem = struct { }; pub fn openDir(_: *RealFS, unsafe_dir_string: string) std.fs.File.OpenError!std.fs.Dir { - return try std.fs.openDirAbsolute(unsafe_dir_string, std.fs.Dir.OpenDirOptions{ .iterate = true, .access_sub_paths = true, .no_follow = false }); + return try std.fs.openIterableDirAbsolute(unsafe_dir_string, std.fs.Dir.OpenDirOptions{ .access_sub_paths = true, .no_follow = false }); } fn readdir( fs: *RealFS, _dir: string, - handle: std.fs.Dir, + handle: std.fs.IterableDir, comptime Iterator: type, iterator: Iterator, ) !DirEntry { - var iter: std.fs.Dir.Iterator = handle.iterate(); + var iter: std.fs.IterableDir.Iterator = handle.iterate(); var dir = DirEntry.init(_dir); const allocator = fs.allocator; errdefer dir.deinit(allocator); @@ -859,11 +859,11 @@ pub const FileSystem = struct { threadlocal var temp_entries_option: EntriesOption = undefined; - pub fn readDirectory(fs: *RealFS, _dir: string, _handle: ?std.fs.Dir) !*EntriesOption { + pub fn readDirectory(fs: *RealFS, _dir: string, _handle: ?std.fs.IterableDir) !*EntriesOption { return readDirectoryWithIterator(fs, _dir, _handle, void, void{}); } - pub fn readDirectoryWithIterator(fs: *RealFS, _dir: string, _handle: ?std.fs.Dir, comptime Iterator: type, iterator: Iterator) !*EntriesOption { + pub fn readDirectoryWithIterator(fs: *RealFS, _dir: string, _handle: ?std.fs.IterableDir, comptime Iterator: type, iterator: Iterator) !*EntriesOption { var dir = _dir; var cache_result: ?allocators.Result = null; if (comptime FeatureFlags.enable_entry_cache) { diff --git a/src/install/bin.zig b/src/install/bin.zig index 8c7175e300c076..b0d67c01e9f047 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -167,7 +167,7 @@ pub const Bin = extern struct { var joined = Path.joinStringBuf(&this.buf, &parts, .auto); this.buf[joined.len] = 0; var joined_: [:0]u8 = this.buf[0..joined.len :0]; - var child_dir = try dir.openDirZ(joined_, .{ .iterate = true }); + var child_dir = try dir.openIterableDirZ(joined_, .{}); this.dir_iterator = 
child_dir.iterate(); } @@ -419,7 +419,7 @@ pub const Bin = extern struct { var joined = Path.joinStringBuf(&target_buf, &parts, .auto); @intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0; var joined_: [:0]const u8 = joined.ptr[0..joined.len :0]; - var child_dir = dir.openDirZ(joined_, .{ .iterate = true }) catch |err| { + var child_dir = dir.openIterableDirZ(joined_, .{}) catch |err| { this.err = err; return; }; @@ -571,7 +571,7 @@ pub const Bin = extern struct { var joined = Path.joinStringBuf(&target_buf, &parts, .auto); @intToPtr([*]u8, @ptrToInt(joined.ptr))[joined.len] = 0; var joined_: [:0]const u8 = joined.ptr[0..joined.len :0]; - var child_dir = dir.openDirZ(joined_, .{ .iterate = true }) catch |err| { + var child_dir = dir.openIterableDirZ(joined_, .{}) catch |err| { this.err = err; return; }; diff --git a/src/install/install.zig b/src/install/install.zig index 0f086046357804..88903354eb2223 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -889,8 +889,8 @@ const PackageInstall = struct { fn installWithClonefileEachDir(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{ - .iterate = true, + var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ + }) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1005,8 +1005,8 @@ const PackageInstall = struct { const Walker = @import("../walker_skippable.zig"); const CopyFile = @import("../copy_file.zig"); - var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{ - .iterate = true, + var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ + }) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1087,8 +1087,8 @@ const PackageInstall = struct { fn installWithHardlink(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{ - .iterate = true, + var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ + }) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1150,8 +1150,8 @@ const PackageInstall = struct { fn installWithSymlink(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{ - .iterate = true, + var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ + }) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -5468,8 +5468,8 @@ pub const PackageManager = struct { // This is where we clean dangling symlinks // This could be slow if there are a lot of symlinks - if (cwd.openDirZ(manager.options.bin_path, .{ - .iterate = true, + if (cwd.openIterableDirZ(manager.options.bin_path, .{ + })) |node_modules_bin_| { var node_modules_bin: std.fs.Dir = node_modules_bin_; var iter: std.fs.Dir.Iterator = node_modules_bin.iterate(); @@ -5975,8 +5975,8 @@ pub const PackageManager = struct { // We deliberately do not close this folder. // If the package hasn't been downloaded, we will need to install it later // We use this file descriptor to know where to put it. 
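The install paths in this file create-and-open their target folders; in Zig 0.10 that helper comes in two flavors depending on whether the result must be walkable. A reduced sketch — `dest` is assumed to be a `std.fs.Dir`, and the subpaths are illustrative:

const std = @import("std");

fn openDestinations(dest: std.fs.Dir) !void {
    // Plain result: fine when the folder is only written into.
    var subdir = try dest.makeOpenPath("node_modules/.cache", .{});
    defer subdir.close();

    // Iterable result: needed when the folder will later be iterated or walked.
    var bin_dir = try dest.makeOpenPathIterable("node_modules/.bin", .{});
    defer bin_dir.close();
}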
- var folder = try cwd.openDirZ(node_modules.relative_path, .{ - .iterate = true, + var folder = try cwd.openIterableDirZ(node_modules.relative_path, .{ + }); installer.node_modules_folder = folder; diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index bb900639f459ef..ff015294ec929d 100644 --- a/src/libarchive/libarchive.zig +++ b/src/libarchive/libarchive.zig @@ -406,9 +406,9 @@ pub const Archive = struct { // if the destination doesn't exist, we skip the whole thing since nothing can overwrite it. if (std.fs.path.isAbsolute(root)) { - break :brk std.fs.openDirAbsolute(root, .{ .iterate = true }) catch return; + break :brk std.fs.openIterableDirAbsolute(root, .{}) catch return; } else { - break :brk cwd.openDir(root, .{ .iterate = true }) catch return; + break :brk cwd.openIterableDir(root, .{}) catch return; } }; @@ -654,9 +654,9 @@ pub const Archive = struct { ) catch {}; if (std.fs.path.isAbsolute(root)) { - break :brk try std.fs.openDirAbsolute(root, .{ .iterate = true }); + break :brk try std.fs.openIterableDirAbsolute(root, .{}); } else { - break :brk try cwd.openDir(root, .{ .iterate = true }); + break :brk try cwd.openIterableDir(root, .{}); } }; diff --git a/src/options.zig b/src/options.zig index 57c8ee7ce5c7c0..c6627cd267c002 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1146,7 +1146,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api. return loaders; } -const Dir = std.fs.Dir; +const Dir = std.fs.IterableDir; pub const SourceMapOption = enum { none, @@ -1548,7 +1548,7 @@ pub const BundleOptions = struct { if (!disabled_static) { var _dirs = [_]string{chosen_dir}; opts.routes.static_dir = try fs.absAlloc(allocator, &_dirs); - opts.routes.static_dir_handle = std.fs.openDirAbsolute(opts.routes.static_dir, .{ .iterate = true }) catch |err| brk: { + opts.routes.static_dir_handle = std.fs.openIterableDirAbsolute(opts.routes.static_dir, .{}) catch |err| brk: { switch (err) { error.FileNotFound => { opts.routes.static_dir_enabled = false; @@ -1669,13 +1669,13 @@ pub const BundleOptions = struct { }; pub fn openOutputDir(output_dir: string) !std.fs.Dir { - return std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{ .iterate = true }) catch brk: { + return std.fs.cwd().openIterableDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch brk: { std.fs.cwd().makeDir(output_dir) catch |err| { Output.printErrorln("error: Unable to mkdir \"{any}\": \"{any}\"", .{ output_dir, @errorName(err) }); Global.crash(); }; - var handle = std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{ .iterate = true }) catch |err2| { + var handle = std.fs.cwd().openIterableDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch |err2| { Output.printErrorln("error: Unable to open \"{any}\": \"{any}\"", .{ output_dir, @errorName(err2) }); Global.crash(); }; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index e5c873160de865..39bae61441addd 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -1756,7 +1756,7 @@ pub const Resolver = struct { var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined; var needs_iter: bool = true; - var open_dir = std.fs.openDirAbsolute(dir_path, .{ .iterate = true }) catch |err| { + var open_dir = std.fs.openIterableDirAbsolute(dir_path, .{}) catch |err| { switch (err) { error.FileNotFound => unreachable, else => { diff --git a/src/walker_skippable.zig b/src/walker_skippable.zig index cc3f3fa22c7daa..14cbcb9ce0dfed 100644 --- a/src/walker_skippable.zig +++ 
b/src/walker_skippable.zig @@ -10,7 +10,7 @@ skip_dirnames: []const u64 = &[_]u64{}, skip_all: []const u64 = &[_]u64{}, seed: u64 = 0, -const Dir = std.fs.Dir; +const Dir = std.fs.IterableDir; pub const WalkerEntry = struct { /// The containing directory. This can be used to operate directly on `basename` @@ -78,7 +78,7 @@ pub fn next(self: *Walker) !?WalkerEntry { self.name_buffer.shrinkRetainingCapacity(cur_len); if (base.kind == .Directory) { - var new_dir = top.iter.dir.openDir(base.name, .{ .iterate = true }) catch |err| switch (err) { + var new_dir = top.iter.dir.openIterableDir(base.name, .{}) catch |err| switch (err) { error.NameTooLong => unreachable, // no path sep in base.name else => |e| return e, }; diff --git a/src/watcher.zig b/src/watcher.zig index de6d7384af6259..fa45b7ec444633 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -643,7 +643,7 @@ pub fn NewWatcher(comptime ContextType: type) type { const fd = brk: { if (fd_ > 0) break :brk fd_; - const dir = try std.fs.openDirAbsolute(file_path, .{ .iterate = true }); + const dir = try std.fs.openIterableDirAbsolute(file_path, .{}); break :brk @truncate(StoredFileDescriptorType, dir.fd); }; From 7b5cfaebc8054abf57929189bd16302c72ca3151 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 00:22:18 +0100 Subject: [PATCH 42/51] Fix: unable to evaluate constant expression --- src/bun.js/node/node_fs_constant.zig | 38 ++++++++++++++++++++++++++-- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/src/bun.js/node/node_fs_constant.zig b/src/bun.js/node/node_fs_constant.zig index 724fe49ed228ab..bf6247cdd4fdc0 100644 --- a/src/bun.js/node/node_fs_constant.zig +++ b/src/bun.js/node/node_fs_constant.zig @@ -198,7 +198,41 @@ const constants_string_format2 = const constants_string1 = std.fmt.comptimePrint(constants_string_format1, .{ Constants.F_OK, Constants.R_OK, Constants.W_OK, Constants.X_OK, Constants.COPYFILE_EXCL, Constants.COPYFILE_FICLONE, Constants.COPYFILE_FICLONE_FORCE, Constants.O_RDONLY, Constants.O_WRONLY, Constants.O_RDWR, Constants.O_CREAT, Constants.O_EXCL, Constants.O_NOCTTY, Constants.O_TRUNC, Constants.O_APPEND, Constants.O_DIRECTORY, Constants.O_NOATIME, Constants.O_NOFOLLOW, Constants.O_SYNC, Constants.O_DSYNC }); -const constants_string2 = - std.fmt.comptimePrint(constants_string_format2, .{ if (@TypeOf(Constants.O_SYMLINK) == void) "undefined" else std.fmt.comptimePrint("{}", .{Constants.O_SYMLINK}), Constants.O_DIRECT, Constants.O_NONBLOCK, Constants.S_IFMT, Constants.S_IFREG, Constants.S_IFDIR, Constants.S_IFCHR, Constants.S_IFBLK, Constants.S_IFIFO, Constants.S_IFLNK, Constants.S_IFSOCK, Constants.S_IRWXU, Constants.S_IRUSR, Constants.S_IWUSR, Constants.S_IXUSR, Constants.S_IRWXG, Constants.S_IRGRP, Constants.S_IWGRP, Constants.S_IXGRP, Constants.S_IRWXO, Constants.S_IROTH, Constants.S_IWOTH, Constants.S_IXOTH, Constants.UV_FS_O_FILEMAP }); +const constants_string2_opts = .{ + if (@TypeOf(Constants.O_SYMLINK) == void) + "undefined" + else + // TODO(vjpr): Why was this needed? It caused an error in zig@0.10. 
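The workaround this commit lands on is to hoist the large argument tuple into a named comptime constant and hand it to a single `comptimePrint` inside a labeled block, instead of nesting another `comptimePrint` call in the tuple literal. A reduced sketch of that shape, with an illustrative format string and values rather than the real constants:

const std = @import("std");

// Tuple hoisted to a named constant so it is a plain comptime value.
const args = .{ 42, 7 };

// Rendered once, at comptime, through a labeled block as in the hunk below.
const rendered = blk: {
    break :blk std.fmt.comptimePrint("first: {d}, second: {d}\n", args);
};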
+ // std.fmt.comptimePrint("{}", .{Constants.O_SYMLINK}), + // -- + Constants.O_SYMLINK, + Constants.O_DIRECT, + Constants.O_NONBLOCK, + Constants.S_IFMT, + Constants.S_IFREG, + Constants.S_IFDIR, + Constants.S_IFCHR, + Constants.S_IFBLK, + Constants.S_IFIFO, + Constants.S_IFLNK, + Constants.S_IFSOCK, + Constants.S_IRWXU, + Constants.S_IRUSR, + Constants.S_IWUSR, + Constants.S_IXUSR, + Constants.S_IRWXG, + Constants.S_IRGRP, + Constants.S_IWGRP, + Constants.S_IXGRP, + Constants.S_IRWXO, + Constants.S_IROTH, + Constants.S_IWOTH, + Constants.S_IXOTH, + Constants.UV_FS_O_FILEMAP +}; + +const constants_string2 = blk: { + break :blk std.fmt.comptimePrint(constants_string_format2, constants_string2_opts); +}; pub const constants_string = constants_string1 ++ constants_string2; From b17348ede9c165ddc3f07098b2c9abc3b1a0d982 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 00:24:06 +0100 Subject: [PATCH 43/51] Fix: shadowing closure param --- src/install/lockfile.zig | 2 +- src/install/npm.zig | 4 ++-- src/resolver/package_json.zig | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 086fe0f0156084..125b2f457bf839 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -2620,7 +2620,7 @@ pub const Package = extern struct { break :bin; }, .e_string => |_str| { - if (str.data.len > 0) { + if (_str.data.len > 0) { package.bin = Bin{ .tag = Bin.Tag.file, .value = .{ diff --git a/src/install/npm.zig b/src/install/npm.zig index e10cc2d508a115..112580c4c060fd 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -1045,8 +1045,8 @@ pub const PackageManifest = struct { } } }, - .e_string => |estr| { - package_version.cpu = Architecture.apply(Architecture.none, estr.data); + .e_string => |_str| { + package_version.cpu = Architecture.apply(Architecture.none, _str.data); }, else => {}, } diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index 9c39703f1f05e5..f396635007260a 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -370,8 +370,8 @@ pub const PackageJSON = struct { if (framework_object.expr.asProperty("router")) |router| { if (router.expr.asProperty("dir")) |route_dir| { switch (route_dir.expr.data) { - .e_string => |estr| { - const str = estr.string(allocator) catch unreachable; + .e_string => |_str| { + const str = _str.string(allocator) catch unreachable; if (str.len > 0) { pair.router.dir = str; pair.router.possible_dirs = &[_]string{}; From 66dd930bf825dfb9e425149caf44cfc96fe9bb7f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 01:48:52 +0100 Subject: [PATCH 44/51] Fix: IterableDir stuff --- src/bun.js/node/node_fs.zig | 2 +- src/fs.zig | 10 ++--- src/install/extract_tarball.zig | 2 +- src/install/install.zig | 78 ++++++++++++++------------------- src/options.zig | 8 ++-- src/resolver/resolver.zig | 29 ++++++------ src/watcher.zig | 4 +- 7 files changed, 62 insertions(+), 71 deletions(-) diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index c28c460941001f..33a7e76f238dc0 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -3232,7 +3232,7 @@ pub const NodeFS = struct { } var entries = std.ArrayList(ExpectedType).init(bun.default_allocator); - var dir = std.fs.Dir{ .fd = fd }; + var dir = std.fs.IterableDir{ .dir = .{ .fd = fd } }; var iterator = DirIterator.iterate(dir); var entry = iterator.next(); while (switch (entry) { diff --git a/src/fs.zig 
b/src/fs.zig index 4b9d5c7eb91b47..97e2960748c265 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -200,7 +200,7 @@ pub const FileSystem = struct { // // dir.data.remove(name); // } - pub fn addEntry(dir: *DirEntry, entry: std.fs.Dir.Entry, allocator: std.mem.Allocator, comptime Iterator: type, iterator: Iterator) !void { + pub fn addEntry(dir: *DirEntry, entry: std.fs.IterableDir.Entry, allocator: std.mem.Allocator, comptime Iterator: type, iterator: Iterator) !void { var _kind: Entry.Kind = undefined; switch (entry.kind) { .Directory => { @@ -813,7 +813,7 @@ pub const FileSystem = struct { pub const Map = allocators.BSSMap(EntriesOption, Preallocate.Counts.dir_entry, false, 256, true); }; - pub fn openDir(_: *RealFS, unsafe_dir_string: string) std.fs.File.OpenError!std.fs.Dir { + pub fn openDir(_: *RealFS, unsafe_dir_string: string) std.fs.File.OpenError!std.fs.IterableDir { return try std.fs.openIterableDirAbsolute(unsafe_dir_string, std.fs.Dir.OpenDirOptions{ .access_sub_paths = true, .no_follow = false }); } @@ -830,8 +830,8 @@ pub const FileSystem = struct { errdefer dir.deinit(allocator); if (FeatureFlags.store_file_descriptors) { - FileSystem.setMaxFd(handle.fd); - dir.fd = handle.fd; + FileSystem.setMaxFd(handle.dir.fd); + dir.fd = handle.dir.fd; } while (try iter.next()) |_entry| { @@ -889,7 +889,7 @@ pub const FileSystem = struct { defer { if (_handle == null and fs.needToCloseFiles()) { - handle.close(); + handle.dir.close(); } } diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index cd927956c8727b..75a5181950347c 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -248,7 +248,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string { // We return a resolved absolute absolute file path to the cache dir. // To get that directory, we open the directory again. 
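The hunks in this commit settle on the Zig 0.10 form of `openDirZ`, which takes a trailing `iterable` flag and still returns a plain `std.fs.Dir`. A minimal sketch — `cache_dir` is assumed to be a `std.fs.Dir` and the folder name is illustrative:

const std = @import("std");

fn reopenCacheFolder(cache_dir: std.fs.Dir) !std.fs.Dir {
    // Zig 0.10 signature: openDirZ(sub_path_c, OpenDirOptions, iterable: bool).
    // `true` mirrors the calls in this commit whose handles are iterated later.
    return cache_dir.openDirZ("lodash@4.17.21", .{}, true);
}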
- var final_dir = cache_dir.openDirZ(folder_name, .{ .iterate = false }) catch |err| { + var final_dir = cache_dir.openDirZ(folder_name, .{}, true) catch |err| { Output.prettyErrorln( "Error {s} failed to verify cache dir for {s}", .{ diff --git a/src/install/install.zig b/src/install/install.zig index 88903354eb2223..8d9701611394af 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -889,9 +889,7 @@ const PackageInstall = struct { fn installWithClonefileEachDir(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ - - }) catch |err| return Result{ + var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; defer cached_package_dir.close(); @@ -949,7 +947,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1005,9 +1003,7 @@ const PackageInstall = struct { const Walker = @import("../walker_skippable.zig"); const CopyFile = @import("../copy_file.zig"); - var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ - - }) catch |err| return Result{ + var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; defer cached_package_dir.close(); @@ -1069,7 +1065,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1087,9 +1083,7 @@ const PackageInstall = struct { fn installWithHardlink(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ - - }) catch |err| return Result{ + var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; defer cached_package_dir.close(); @@ -1126,7 +1120,7 @@ const PackageInstall = struct { } }; - var subdir = this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1150,9 +1144,7 @@ const PackageInstall = struct { fn installWithSymlink(this: *PackageInstall) !Result { const Walker = @import("../walker_skippable.zig"); - var cached_package_dir = this.cache_dir.openIterableDirZ(this.cache_dir_subpath, .{ - - }) catch |err| return Result{ + var cached_package_dir = this.cache_dir.openDirZ(this.cache_dir_subpath, .{}, true) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; defer cached_package_dir.close(); @@ -1221,7 +1213,7 @@ const PackageInstall = struct { } }; - var subdir = 
this.destination_dir.makeOpenPathIterable(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ + var subdir = this.destination_dir.makeOpenPath(std.mem.span(this.destination_dir_subpath), .{}) catch |err| return Result{ .fail = .{ .err = err, .step = .opening_cache_dir }, }; @@ -1655,7 +1647,7 @@ pub const PackageManager = struct { return this.global_link_dir orelse brk: { var global_dir = try Options.openGlobalDir(this.options.explicit_global_directory); this.global_dir = global_dir; - this.global_link_dir = try global_dir.makeOpenPathIterable("node_modules", .{}); + this.global_link_dir = try global_dir.makeOpenPath("node_modules", .{}); var buf: [bun.MAX_PATH_BYTES]u8 = undefined; const _path = try std.os.getFdPath(this.global_link_dir.?.fd, &buf); this.global_link_dir_path = try Fs.FileSystem.DirnameStore.instance.append([]const u8, _path); @@ -1765,13 +1757,13 @@ pub const PackageManager = struct { loop: while (true) { if (this.options.enable.cache) { const cache_dir = fetchCacheDirectoryPath(this.env_loader); - return std.fs.cwd().makeOpenPathIterable(cache_dir.path, .{}) catch { + return std.fs.cwd().makeOpenPath(cache_dir.path, .{}) catch { this.options.enable.cache = false; continue :loop; }; } - return std.fs.cwd().makeOpenPathIterable("node_modules/.cache", .{}) catch |err| { + return std.fs.cwd().makeOpenPath("node_modules/.cache", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to write files: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1790,9 +1782,9 @@ pub const PackageManager = struct { // This makes renameat() work const default_tempdir = Fs.FileSystem.RealFS.getDefaultTempDir(); var tried_dot_tmp = false; - var tempdir: std.fs.Dir = std.fs.cwd().makeOpenPathIterable(default_tempdir, .{}) catch brk: { + var tempdir: std.fs.Dir = std.fs.cwd().makeOpenPath(default_tempdir, .{}) catch brk: { tried_dot_tmp = true; - break :brk cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err| { + break :brk cache_directory.makeOpenPath(".tmp", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1805,7 +1797,7 @@ pub const PackageManager = struct { if (!tried_dot_tmp) { tried_dot_tmp = true; - tempdir = cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err| { + tempdir = cache_directory.makeOpenPath(".tmp", .{}) catch |err| { Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); Global.crash(); }; @@ -1820,7 +1812,7 @@ pub const PackageManager = struct { std.os.renameatZ(tempdir.fd, tmpname, cache_directory.fd, tmpname) catch |err| { if (!tried_dot_tmp) { tried_dot_tmp = true; - tempdir = cache_directory.makeOpenPathIterable(".tmp", .{}) catch |err2| { + tempdir = cache_directory.makeOpenPath(".tmp", .{}) catch |err2| { Output.prettyErrorln("error: bun is unable to write files to tempdir: {s}", .{@errorName(err2)}); Global.crash(); }; @@ -1925,7 +1917,7 @@ pub const PackageManager = struct { pub fn isFolderInCache(this: *PackageManager, folder_path: stringZ) bool { // TODO: is this slow? 
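Where this commit is left holding only a plain `std.fs.Dir` (or a raw fd) but needs to list entries, it wraps the descriptor in an `IterableDir` by hand, as the hunks below do. A reduced sketch of that pattern, assuming `dir` was opened with iteration in mind:

const std = @import("std");

fn countEntries(dir: std.fs.Dir) !usize {
    // IterableDir is a thin wrapper around Dir in Zig 0.10, so it can be built
    // from an existing descriptor instead of reopening the path.
    const iterable = std.fs.IterableDir{ .dir = .{ .fd = dir.fd } };
    var it = iterable.iterate();
    var count: usize = 0;
    while (try it.next()) |_| count += 1;
    return count;
}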
- var dir = this.getCacheDirectory().openDirZ(folder_path, .{ .iterate = false }) catch return false; + var dir = this.getCacheDirectory().openDirZ(folder_path, .{}, true) catch return false; dir.close(); return true; } @@ -1987,7 +1979,8 @@ pub const PackageManager = struct { } }; defer dir.close(); - var iter = dir.iterate(); + const iterable_dir = std.fs.IterableDir{ .dir = .{ .fd = dir.fd } }; + var iter = iterable_dir.iterate(); while (try iter.next()) |entry| { if (entry.kind != .Directory and entry.kind != .SymLink) continue; @@ -3533,25 +3526,25 @@ pub const PackageManager = struct { pub fn openGlobalDir(explicit_global_dir: string) !std.fs.Dir { if (std.os.getenvZ("BUN_INSTALL_GLOBAL_DIR")) |home_dir| { - return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); + return try std.fs.cwd().makeOpenPath(home_dir, .{}); } if (explicit_global_dir.len > 0) { - return try std.fs.cwd().makeOpenPathIterable(explicit_global_dir, .{}); + return try std.fs.cwd().makeOpenPath(explicit_global_dir, .{}); } if (std.os.getenvZ("BUN_INSTALL")) |home_dir| { var buf: [bun.MAX_PATH_BYTES]u8 = undefined; var parts = [_]string{ "install", "global" }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPathIterable(path, .{}); + return try std.fs.cwd().makeOpenPath(path, .{}); } if (std.os.getenvZ("XDG_CACHE_HOME") orelse std.os.getenvZ("HOME")) |home_dir| { var buf: [bun.MAX_PATH_BYTES]u8 = undefined; var parts = [_]string{ ".bun", "install", "global" }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPathIterable(path, .{}); + return try std.fs.cwd().makeOpenPath(path, .{}); } return error.@"No global directory found"; @@ -3559,13 +3552,13 @@ pub const PackageManager = struct { pub fn openGlobalBinDir(opts_: ?*const Api.BunInstall) !std.fs.Dir { if (std.os.getenvZ("BUN_INSTALL_BIN")) |home_dir| { - return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); + return try std.fs.cwd().makeOpenPath(home_dir, .{}); } if (opts_) |opts| { if (opts.global_bin_dir) |home_dir| { if (home_dir.len > 0) { - return try std.fs.cwd().makeOpenPathIterable(home_dir, .{}); + return try std.fs.cwd().makeOpenPath(home_dir, .{}); } } } @@ -3576,7 +3569,7 @@ pub const PackageManager = struct { "bin", }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPathIterable(path, .{}); + return try std.fs.cwd().makeOpenPath(path, .{}); } if (std.os.getenvZ("XDG_CACHE_HOME") orelse std.os.getenvZ("HOME")) |home_dir| { @@ -3586,7 +3579,7 @@ pub const PackageManager = struct { "bin", }; var path = Path.joinAbsStringBuf(home_dir, &buf, &parts, .auto); - return try std.fs.cwd().makeOpenPathIterable(path, .{}); + return try std.fs.cwd().makeOpenPath(path, .{}); } return error.@"Missing global bin directory: try setting $BUN_INSTALL"; @@ -4546,7 +4539,7 @@ pub const PackageManager = struct { try manager.setupGlobalDir(&ctx); - break :brk manager.global_dir.?.makeOpenPathIterable("node_modules", .{}) catch |err| { + break :brk manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { if (manager.options.log_level != .silent) Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); Global.crash(); @@ -4709,7 +4702,7 @@ pub const PackageManager = struct { try manager.setupGlobalDir(&ctx); - break :brk manager.global_dir.?.makeOpenPathIterable("node_modules", .{}) catch |err| { + break :brk 
manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { if (manager.options.log_level != .silent) Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); Global.crash(); @@ -5468,11 +5461,10 @@ pub const PackageManager = struct { // This is where we clean dangling symlinks // This could be slow if there are a lot of symlinks - if (cwd.openIterableDirZ(manager.options.bin_path, .{ - - })) |node_modules_bin_| { + if (cwd.openDirZ(manager.options.bin_path, .{}, true)) |node_modules_bin_| { var node_modules_bin: std.fs.Dir = node_modules_bin_; - var iter: std.fs.Dir.Iterator = node_modules_bin.iterate(); + const iterable_dir = std.fs.IterableDir{ .dir = .{ .fd = node_modules_bin.fd } }; + var iter: std.fs.Dir.Iterator = iterable_dir.iterate(); iterator: while (iter.next() catch null) |entry| { switch (entry.kind) { std.fs.Dir.Entry.Kind.SymLink => { @@ -5910,13 +5902,13 @@ pub const PackageManager = struct { // we want to check lazily though // no need to download packages you've already installed!! var skip_verify_installed_version_number = false; - var node_modules_folder = std.fs.cwd().openDirZ("node_modules", .{}) catch brk: { + var node_modules_folder = std.fs.cwd().openDirZ("node_modules", .{}, true) catch brk: { skip_verify_installed_version_number = true; std.fs.cwd().makeDirZ("node_modules") catch |err| { Output.prettyErrorln("error: {s} creating node_modules folder", .{@errorName(err)}); Global.crash(); }; - break :brk std.fs.cwd().openDirZ("node_modules", .{}) catch |err| { + break :brk std.fs.cwd().openDirZ("node_modules", .{}, true) catch |err| { Output.prettyErrorln("error: {s} opening node_modules folder", .{@errorName(err)}); Global.crash(); }; @@ -5975,9 +5967,7 @@ pub const PackageManager = struct { // We deliberately do not close this folder. // If the package hasn't been downloaded, we will need to install it later // We use this file descriptor to know where to put it. - var folder = try cwd.openIterableDirZ(node_modules.relative_path, .{ - - }); + var folder = try cwd.openDirZ(node_modules.relative_path, .{}, true); installer.node_modules_folder = folder; diff --git a/src/options.zig b/src/options.zig index c6627cd267c002..8b8cbf87be5680 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1146,7 +1146,7 @@ pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api. 
return loaders; } -const Dir = std.fs.IterableDir; +const Dir = std.fs.Dir; pub const SourceMapOption = enum { none, @@ -1548,7 +1548,7 @@ pub const BundleOptions = struct { if (!disabled_static) { var _dirs = [_]string{chosen_dir}; opts.routes.static_dir = try fs.absAlloc(allocator, &_dirs); - opts.routes.static_dir_handle = std.fs.openIterableDirAbsolute(opts.routes.static_dir, .{}) catch |err| brk: { + opts.routes.static_dir_handle = std.fs.openDirAbsolute(opts.routes.static_dir, .{}) catch |err| brk: { switch (err) { error.FileNotFound => { opts.routes.static_dir_enabled = false; @@ -1669,13 +1669,13 @@ pub const BundleOptions = struct { }; pub fn openOutputDir(output_dir: string) !std.fs.Dir { - return std.fs.cwd().openIterableDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch brk: { + return std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch brk: { std.fs.cwd().makeDir(output_dir) catch |err| { Output.printErrorln("error: Unable to mkdir \"{any}\": \"{any}\"", .{ output_dir, @errorName(err) }); Global.crash(); }; - var handle = std.fs.cwd().openIterableDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch |err2| { + var handle = std.fs.cwd().openDir(output_dir, std.fs.Dir.OpenDirOptions{}) catch |err2| { Output.printErrorln("error: Unable to open \"{any}\": \"{any}\"", .{ output_dir, @errorName(err2) }); Global.crash(); }; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 39bae61441addd..dfea68480a76a1 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -233,7 +233,7 @@ pub const DirEntryResolveQueueItem = struct { }; threadlocal var _dir_entry_paths_to_resolve: [256]DirEntryResolveQueueItem = undefined; -threadlocal var _open_dirs: [256]std.fs.Dir = undefined; +threadlocal var _open_dirs: [256]std.fs.IterableDir = undefined; threadlocal var resolve_without_remapping_buf: [bun.MAX_PATH_BYTES]u8 = undefined; threadlocal var index_buf: [bun.MAX_PATH_BYTES]u8 = undefined; threadlocal var dir_info_uncached_filename_buf: [bun.MAX_PATH_BYTES]u8 = undefined; @@ -1781,8 +1781,8 @@ pub const Resolver = struct { }) catch unreachable; if (FeatureFlags.store_file_descriptors) { - Fs.FileSystem.setMaxFd(open_dir.fd); - dir_entries_option.entries.fd = open_dir.fd; + Fs.FileSystem.setMaxFd(open_dir.dir.fd); + dir_entries_option.entries.fd = open_dir.dir.fd; } var dir_iterator = open_dir.iterate(); while (dir_iterator.next() catch null) |_value| { @@ -1804,7 +1804,7 @@ pub const Resolver = struct { // to check for a parent package.json null, allocators.NotFound, - open_dir.fd, + open_dir.dir.fd, package_id, ); return dir_info_ptr; @@ -2212,7 +2212,7 @@ pub const Resolver = struct { // Anything if (open_dir_count > 0 and r.fs.fs.needToCloseFiles()) { - var open_dirs: []std.fs.Dir = _open_dirs[0..open_dir_count]; + var open_dirs: []std.fs.IterableDir = _open_dirs[0..open_dir_count]; for (open_dirs) |*open_dir| { open_dir.close(); } @@ -2239,7 +2239,7 @@ pub const Resolver = struct { defer top_parent = queue_top.result; queue_slice.len -= 1; - var _open_dir: anyerror!std.fs.Dir = undefined; + var _open_dir: anyerror!std.fs.IterableDir = undefined; if (queue_top.fd == 0) { // This saves us N copies of .toPosixPath @@ -2257,7 +2257,7 @@ pub const Resolver = struct { // } } - const open_dir = if (queue_top.fd != 0) std.fs.Dir{ .fd = queue_top.fd } else (_open_dir catch |err| { + const open_dir = if (queue_top.fd != 0) std.fs.IterableDir{ .dir = .{ .fd = queue_top.fd } } else (_open_dir catch |err| { switch (err) { error.EACCESS => {}, @@ -2302,7 
+2302,7 @@ pub const Resolver = struct { }); if (queue_top.fd == 0) { - Fs.FileSystem.setMaxFd(open_dir.fd); + Fs.FileSystem.setMaxFd(open_dir.dir.fd); // these objects mostly just wrap the file descriptor, so it's fine to keep it. _open_dirs[open_dir_count] = open_dir; open_dir_count += 1; @@ -2352,10 +2352,11 @@ pub const Resolver = struct { }); if (FeatureFlags.store_file_descriptors) { - Fs.FileSystem.setMaxFd(open_dir.fd); - dir_entries_option.entries.fd = open_dir.fd; + Fs.FileSystem.setMaxFd(open_dir.dir.fd); + dir_entries_option.entries.fd = open_dir.dir.fd; } - var dir_iterator = open_dir.iterate(); + const iterable_dir = std.fs.IterableDir{ .dir = .{ .fd = open_dir.dir.fd } }; + var dir_iterator = iterable_dir.iterate(); while (try dir_iterator.next()) |_value| { dir_entries_option.entries.addEntry(_value, allocator, void, void{}) catch unreachable; } @@ -2373,7 +2374,7 @@ pub const Resolver = struct { cached_dir_entry_result.index, r.dir_cache.atIndex(top_parent.index), top_parent.index, - open_dir.fd, + open_dir.dir.fd, null, ); @@ -3227,7 +3228,7 @@ pub const Resolver = struct { } const this_dir = std.fs.Dir{ .fd = fd }; - var file = this_dir.openDirZ("node_modules/.bin", .{}) catch break :append_bin_dir; + var file = this_dir.openDirZ("node_modules/.bin", .{}, true) catch break :append_bin_dir; defer file.close(); var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir; bin_folders_lock.lock(); @@ -3252,7 +3253,7 @@ pub const Resolver = struct { } const this_dir = std.fs.Dir{ .fd = fd }; - var file = this_dir.openDirZ(".bin", .{}) catch break :append_bin_dir; + var file = this_dir.openDirZ(".bin", .{}, true) catch break :append_bin_dir; defer file.close(); var bin_path = std.os.getFdPath(file.fd, &node_bin_path) catch break :append_bin_dir; bin_folders_lock.lock(); diff --git a/src/watcher.zig b/src/watcher.zig index fa45b7ec444633..ab4d1d323017f5 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -643,8 +643,8 @@ pub fn NewWatcher(comptime ContextType: type) type { const fd = brk: { if (fd_ > 0) break :brk fd_; - const dir = try std.fs.openIterableDirAbsolute(file_path, .{}); - break :brk @truncate(StoredFileDescriptorType, dir.fd); + const dir = (try std.fs.openIterableDirAbsolute(file_path, .{})); + break :brk @truncate(StoredFileDescriptorType, dir.dir.fd); }; const parent_hash = Watcher.getHash(Fs.PathName.init(file_path).dirWithTrailingSlash()); From ef6fc1cd7e2a6442df574656be8abeca53f51095 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:39:04 +0100 Subject: [PATCH 45/51] Fix: posix_spawnattr_destroy --- src/bun.js/api/bun/spawn.zig | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/bun.js/api/bun/spawn.zig b/src/bun.js/api/bun/spawn.zig index c1deebd3a12a45..1bd211cfd6b121 100644 --- a/src/bun.js/api/bun/spawn.zig +++ b/src/bun.js/api/bun/spawn.zig @@ -51,12 +51,7 @@ pub const PosixSpawn = struct { } pub fn deinit(self: *Attr) void { - if (comptime bun.Environment.isMac) { - // https://github.com/ziglang/zig/issues/12964 - system.posix_spawnattr_destroy(&self.attr); - } else { - _ = system.posix_spawnattr_destroy(&self.attr); - } + _ = system.posix_spawnattr_destroy(&self.attr); self.* = undefined; } @@ -93,12 +88,7 @@ pub const PosixSpawn = struct { } pub fn deinit(self: *Actions) void { - if (comptime bun.Environment.isMac) { - // https://github.com/ziglang/zig/issues/12964 - system.posix_spawn_file_actions_destroy(&self.actions); - } else { - _ = 
system.posix_spawn_file_actions_destroy(&self.actions); - } + _ = system.posix_spawn_file_actions_destroy(&self.actions); self.* = undefined; } From 9ed2f8d7b2b4742b11ad591ba282fc20853c2723 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:39:45 +0100 Subject: [PATCH 46/51] string stuff --- src/__global.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/__global.zig b/src/__global.zig index 79f80458f69946..70d9e8b6860cd3 100644 --- a/src/__global.zig +++ b/src/__global.zig @@ -16,9 +16,9 @@ else pub const package_json_version_with_sha = if (Environment.git_sha.len == 0) package_json_version else if (Environment.isDebug) - std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({any})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }) + std.fmt.comptimePrint(BASE_VERSION ++ ".{d}_debug ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }) else - std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({any})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }); + std.fmt.comptimePrint(BASE_VERSION ++ ".{d} ({s})", .{ build_id, Environment.git_sha[0..@min(Environment.git_sha.len, 8)] }); pub const os_name = if (Environment.isWindows) "win32" From 9808e5f5772d586dc442f8f58e8d6688805d36d8 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:40:33 +0100 Subject: [PATCH 47/51] revert some string format changes --- src/http.zig | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/http.zig b/src/http.zig index d2d86121e74f77..e22e94cc03808b 100644 --- a/src/http.zig +++ b/src/http.zig @@ -614,7 +614,7 @@ pub const RequestContext = struct { else => @compileError("Invalid code passed to printStatusLine"), }; - return std.fmt.comptimePrint("HTTP/1.1 {d} {any}\r\n", .{ code, status_text }); + return std.fmt.comptimePrint("HTTP/1.1 {d} {s}\r\n", .{ code, status_text }); } pub fn printStatusLineError(err: anyerror, buf: []u8) []const u8 { @@ -718,12 +718,12 @@ pub const RequestContext = struct { ctx.status = @as(HTTPStatusCode, 500); } - threadlocal var status_buf: [std.fmt.count("HTTP/1.1 {d} {any}\r\n", .{ 200, "OK" })]u8 = undefined; + threadlocal var status_buf: [std.fmt.count("HTTP/1.1 {d} {s}\r\n", .{ 200, "OK" })]u8 = undefined; pub fn writeStatusSlow(ctx: *RequestContext, code: u16) !void { _ = try ctx.writeSocket( try std.fmt.bufPrint( &status_buf, - "HTTP/1.1 {d} {any}\r\n", + "HTTP/1.1 {d} {s}\r\n", .{ code, if (code > 299) "HM" else "OK" }, ), SOCKET_FLAGS, From eb4724f81691fecd8bc68cdb5c78a921c377fc10 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:40:43 +0100 Subject: [PATCH 48/51] revert some string format changes --- src/meta.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/meta.zig b/src/meta.zig index c40f3ce8df3d41..cd171cc29c80a2 100644 --- a/src/meta.zig +++ b/src/meta.zig @@ -22,5 +22,5 @@ pub fn typeBaseName(comptime fullname: []const u8) []const u8 { const idx = comptime std.mem.lastIndexOf(u8, fullname, "."); const name = if (idx == null) fullname else fullname[(idx.? 
+ 1)..]; - return comptime std.fmt.comptimePrint("{any}", .{name}); + return comptime std.fmt.comptimePrint("{s}", .{name}); } From 3c8444458fcb243e4efb9eb9898076bd52acd63f Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:40:53 +0100 Subject: [PATCH 49/51] @alignCast --- src/string_immutable.zig | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 2c4f174e31ef5f..973f448e2f1081 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -3076,7 +3076,8 @@ pub fn firstNonASCII16CheckMin(comptime Slice: type, slice: Slice, comptime chec // it removes a loop, but probably is slower in the end const cmp = @bitCast(AsciiVectorU16U1, vec > max_u16_ascii) | @bitCast(AsciiVectorU16U1, vec < min_u16_ascii); - const bitmask: u16 = @ptrCast(*const u16, &cmp).*; + const cmp_aligned = @alignCast(2, &cmp); + const bitmask: u16 = @ptrCast(*const u16, &cmp_aligned).*; const first = @ctz(@as(u16, bitmask)); return @intCast(u32, @as(u32, first) + @@ -3087,7 +3088,8 @@ pub fn firstNonASCII16CheckMin(comptime Slice: type, slice: Slice, comptime chec remaining.len -= (@ptrToInt(remaining.ptr) - @ptrToInt(remaining_start)) / 2; const cmp = vec > max_u16_ascii; - const bitmask = @ptrCast(*const u16, &cmp).*; + const cmp_aligned = @alignCast(2, &cmp); + const bitmask = @ptrCast(*const u16, &cmp_aligned).*; const first = @ctz(@as(u16, bitmask)); return @intCast(u32, @as(u32, first) + From ab5253c030c0ea4b6eff61132d8876c7027acd12 Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:41:10 +0100 Subject: [PATCH 50/51] revert some string format changes --- src/bun.js/api/bun/subprocess.zig | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 1dbc045d3c0bb5..496f7ce722699b 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -1020,19 +1020,19 @@ pub const Subprocess = struct { } const stdin_pipe = if (stdio[0].isPiped()) os.pipe2(0) catch |err| { - globalThis.throw("failed to create stdin pipe: {s}", .{err}); + globalThis.throw("failed to create stdin pipe: {any}", .{err}); return .zero; } else undefined; errdefer if (stdio[0].isPiped()) destroyPipe(stdin_pipe); const stdout_pipe = if (stdio[1].isPiped()) os.pipe2(0) catch |err| { - globalThis.throw("failed to create stdout pipe: {s}", .{err}); + globalThis.throw("failed to create stdout pipe: {any}", .{err}); return .zero; } else undefined; errdefer if (stdio[1].isPiped()) destroyPipe(stdout_pipe); const stderr_pipe = if (stdio[2].isPiped()) os.pipe2(0) catch |err| { - globalThis.throw("failed to create stderr pipe: {s}", .{err}); + globalThis.throw("failed to create stderr pipe: {any}", .{err}); return .zero; } else undefined; errdefer if (stdio[2].isPiped()) destroyPipe(stderr_pipe); From 98df9d7703708cc7351563b9500d6a337003c3eb Mon Sep 17 00:00:00 2001 From: Vaughan Rouesnel Date: Mon, 14 Nov 2022 04:41:16 +0100 Subject: [PATCH 51/51] Fix: IterableDir stuff --- src/bun.js/node/dir_iterator.zig | 2 +- src/bun.js/node/node_fs_constant.zig | 1 + src/install/extract_tarball.zig | 2 +- src/libarchive/libarchive.zig | 3 ++- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/bun.js/node/dir_iterator.zig b/src/bun.js/node/dir_iterator.zig index 3af04a3db8fe1a..04a2dd6048a87f 100644 --- a/src/bun.js/node/dir_iterator.zig +++ b/src/bun.js/node/dir_iterator.zig @@ -46,7 +46,7 @@ pub const Iterator = 
switch (builtin.os.tag) { start_over: while (true) { if (self.index >= self.end_index) { const rc = os.system.__getdirentries64( - self.dir.fd, + self.dir.dir.fd, &self.buf, self.buf.len, &self.seek, diff --git a/src/bun.js/node/node_fs_constant.zig b/src/bun.js/node/node_fs_constant.zig index bf6247cdd4fdc0..2c53687fa57998 100644 --- a/src/bun.js/node/node_fs_constant.zig +++ b/src/bun.js/node/node_fs_constant.zig @@ -203,6 +203,7 @@ const constants_string2_opts = .{ "undefined" else // TODO(vjpr): Why was this needed? It caused an error in zig@0.10. + // It caused error due to `{any}` `{}` not being allowed during comptime. // std.fmt.comptimePrint("{}", .{Constants.O_SYMLINK}), // -- Constants.O_SYMLINK, diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 75a5181950347c..3f5b3ff966f82a 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -277,7 +277,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !string { // create an index storing each version of a package installed create_index: { - var index_dir = cache_dir.makeOpenPathIterable(name, .{}) catch break :create_index; + var index_dir = cache_dir.makeOpenPath(name, .{}) catch break :create_index; defer index_dir.close(); index_dir.symLink( final_path, diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index ff015294ec929d..8493f3fab70ba4 100644 --- a/src/libarchive/libarchive.zig +++ b/src/libarchive/libarchive.zig @@ -470,7 +470,7 @@ pub const Archive = struct { pub fn extractToDir( file_buffer: []const u8, - dir: std.fs.Dir, + iter_dir: std.fs.IterableDir, ctx: ?*Archive.Context, comptime FilePathAppender: type, appender: FilePathAppender, @@ -478,6 +478,7 @@ pub const Archive = struct { comptime close_handles: bool, comptime log: bool, ) !u32 { + const dir = iter_dir.dir; var entry: *lib.archive_entry = undefined; var stream: BufferReadStream = undefined;
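The signature change above pushes `std.fs.IterableDir` into callers while the body keeps working on a plain handle; in Zig 0.10 that handle is reached through the `.dir` field, and the raw descriptor through `.dir.fd`. A reduced sketch of the access pattern — the marker file name is illustrative:

const std = @import("std");

fn markExtracted(iter_dir: std.fs.IterableDir) !void {
    // Unwrap the plain handle for non-iterating operations; `.dir.fd` exposes
    // the raw descriptor, as the dir_iterator hunk above needs.
    const dir: std.fs.Dir = iter_dir.dir;
    try dir.writeFile(".bun-extracted", "");
}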