Skip to content

Commit

Permalink
feat(bundler): implement enum inlining / more constant folding (#12144)
Browse files Browse the repository at this point in the history
Co-authored-by: paperdave <[email protected]>
Co-authored-by: Jarred Sumner <[email protected]>
Co-authored-by: Jarred-Sumner <[email protected]>
  • Loading branch information
4 people authored Jul 3, 2024
1 parent b9fba61 commit 688ddbd
Show file tree
Hide file tree
Showing 56 changed files with 2,736 additions and 1,955 deletions.
2 changes: 1 addition & 1 deletion build.zig
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
}

pub fn build(b: *Build) !void {
std.debug.print("zig build v{s}\n", .{builtin.zig_version_string});
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});

b.zig_lib_dir = b.zig_lib_dir orelse b.path("src/deps/zig/lib");

Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
"test": "node scripts/runner.node.mjs ./build/bun-debug",
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
"banned": "bun packages/bun-internal-test/src/linter.ts",
"zig-check": ".cache/zig/zig.exe build check --summary new",
"zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
"zig-check-windows": ".cache/zig/zig.exe build check-windows --summary new",
Expand Down
6 changes: 5 additions & 1 deletion packages/bun-internal-test/src/banned.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,13 @@
"std.debug.assert": "Use bun.assert instead",
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
"std.debug.print": "Don't let this be committed",
"std.mem.indexOfAny": "Use bun.strings.indexAny or bun.strings.indexAnyComptime",
"std.mem.indexOfAny(": "Use bun.strings.indexOfAny",
"undefined != ": "This is by definition Undefined Behavior.",
"undefined == ": "This is by definition Undefined Behavior.",
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
"std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
"std.StringHashMap(": "bun.StringHashMap has a faster `eql`",
"": ""
}
4 changes: 1 addition & 3 deletions packages/bun-internal-test/src/linter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,7 @@ for (const [banned, suggestion] of Object.entries(BANNED)) {
if (banned.length === 0) continue;
// Run git grep to find occurrences of std.debug.assert in .zig files
// .nothrow() is here since git will exit with non-zero if no matches are found.
let stdout = await $`git grep -n -F "${banned}" "src/**/**.zig" | grep -v -F '//' | grep -v -F bench`
.nothrow()
.text();
let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text();

stdout = stdout.trim();
if (stdout.length === 0) continue;
Expand Down
3 changes: 2 additions & 1 deletion src/ArenaAllocator.zig
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
const std = @import("std");
const assert = @import("root").bun.assert;
const bun = @import("root").bun;
const assert = bun.assert;
const mem = std.mem;
const Allocator = std.mem.Allocator;

Expand Down
13 changes: 1 addition & 12 deletions src/Global.zig
Original file line number Diff line number Diff line change
Expand Up @@ -171,18 +171,7 @@ pub inline fn configureAllocator(_: AllocatorConfiguration) void {
// if (!config.long_running) Mimalloc.mi_option_set(Mimalloc.mi_option_reset_delay, 0);
}

/// Print a formatted error message through `Output`, then abort the process.
/// Never returns.
pub fn panic(comptime fmt: string, args: anytype) noreturn {
    // Hint to the optimizer/branch predictor that this path is rarely taken.
    @setCold(true);
    if (comptime Environment.isWasm) {
        // On wasm we cannot use std.debug.panic's unwinding machinery,
        // so print, flush, and use the @panic builtin instead.
        Output.printErrorln(fmt, args);
        Output.flush(); // ensure the message is visible before aborting
        @panic(fmt);
    } else {
        // NOTE(review): prettyErrorln presumably applies Output's
        // pretty/markup formatting to `fmt` — confirm in Output.zig.
        Output.prettyErrorln(fmt, args);
        Output.flush();
        std.debug.panic(fmt, args);
    }
}
pub const panic = Output.panic; // deprecated

pub fn notimpl() noreturn {
@setCold(true);
Expand Down
6 changes: 3 additions & 3 deletions src/Progress.zig
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ const std = @import("std");
const builtin = @import("builtin");
const windows = std.os.windows;
const testing = std.testing;
const assert = std.debug.assert;
const assert = (std.debug).assert;
const Progress = @This();

/// `null` if the current node (and its children) should
Expand Down Expand Up @@ -246,7 +246,7 @@ fn clearWithHeldLock(p: *Progress, end_ptr: *usize) void {
end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[{d}D", .{p.columns_written}) catch unreachable).len;
end += (std.fmt.bufPrint(p.output_buffer[end..], "\x1b[0K", .{}) catch unreachable).len;
} else if (builtin.os.tag == .windows) winapi: {
std.debug.assert(p.is_windows_terminal);
assert(p.is_windows_terminal);

var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined;
if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) != windows.TRUE) {
Expand Down Expand Up @@ -357,7 +357,7 @@ fn refreshWithHeldLock(self: *Progress) void {

pub fn log(self: *Progress, comptime format: []const u8, args: anytype) void {
const file = self.terminal orelse {
std.debug.print(format, args);
(std.debug).print(format, args);
return;
};
self.refresh();
Expand Down
3 changes: 2 additions & 1 deletion src/StaticHashMap.zig
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ const mem = std.mem;
const math = std.math;
const testing = std.testing;

const assert = @import("root").bun.assert;
const bun = @import("root").bun;
const assert = bun.assert;

pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage);
Expand Down
2 changes: 1 addition & 1 deletion src/api/schema.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2863,7 +2863,7 @@ pub const Api = struct {
};

pub const NpmRegistryMap = struct {
scopes: std.StringArrayHashMapUnmanaged(NpmRegistry) = .{},
scopes: bun.StringArrayHashMapUnmanaged(NpmRegistry) = .{},

pub fn decode(reader: anytype) anyerror!NpmRegistryMap {
var this = std.mem.zeroes(NpmRegistryMap);
Expand Down
131 changes: 21 additions & 110 deletions src/ast/base.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,10 @@ const std = @import("std");
const bun = @import("root").bun;
const unicode = std.unicode;

pub const JavascriptString = []u16;
pub fn newJavascriptString(comptime text: []const u8) JavascriptString {
return unicode.utf8ToUtf16LeStringLiteral(text);
}

pub const NodeIndex = u32;
pub const NodeIndexNone = 4294967293;

// TODO: figure out if we actually need this
// -- original comment --
// Files are parsed in parallel for speed. We want to allow each parser to
// generate symbol IDs that won't conflict with each other. We also want to be
// able to quickly merge symbol tables from all files into one giant symbol
// table.
//
// We can accomplish both goals by giving each symbol ID two parts: a source
// index that is unique to the parser goroutine, and an inner index that
// increments as the parser generates new symbol IDs. Then a symbol map can
// be an array of arrays indexed first by source index, then by inner index.
// The maps can be merged quickly by creating a single outer array containing
// all inner arrays from all parsed files.

pub const RefHashCtx = struct {
pub fn hash(_: @This(), key: Ref) u32 {
Expand All @@ -44,89 +27,6 @@ pub const RefCtx = struct {
}
};

/// Sets the range of bits starting at `start_bit` up to and excluding
/// `start_bit + number_of_bits` in `target` to the low bits of `value`,
/// returning the new value (`target` is taken by value, not modified in place).
/// To be specific: if the range is N bits long, the N lower bits of `value`
/// will be used; if any of the other bits in `value` are set to 1, this
/// function will panic (only when runtime safety is enabled).
///
/// ```zig
/// const val: u8 = 0b10000000;
/// const out = setBits(u8, val, 2, 4, 0b00001101);
/// try testing.expectEqual(@as(u8, 0b10110100), out);
/// ```
///
/// ## Panics
/// This method will panic if `value` does not fit in `number_of_bits` bits
/// (checked via `getBits`, and only when `std.debug.runtime_safety` is true).
pub fn setBits(
    comptime TargetType: type,
    target: TargetType,
    comptime start_bit: comptime_int,
    comptime number_of_bits: comptime_int,
    value: TargetType,
) TargetType {
    // exclusive upper bound of the bit range
    const end_bit = start_bit + number_of_bits;

    // All type/range validation happens at compile time; only unsigned
    // fixed-width integers are supported.
    comptime {
        if (number_of_bits == 0) @compileError("non-zero number_of_bits must be provided");

        if (@typeInfo(TargetType) == .Int) {
            if (@typeInfo(TargetType).Int.signedness != .unsigned) {
                @compileError("requires an unsigned integer, found " ++ @typeName(TargetType));
            }
            if (start_bit >= @bitSizeOf(TargetType)) {
                @compileError("start_bit index is out of bounds of the bit field");
            }
            if (end_bit > @bitSizeOf(TargetType)) {
                @compileError("start_bit + number_of_bits is out of bounds of the bit field");
            }
        } else if (@typeInfo(TargetType) == .ComptimeInt) {
            @compileError("comptime_int is unsupported");
        } else {
            @compileError("requires an unsigned integer, found " ++ @typeName(TargetType));
        }
    }

    // Safety check: `value` must fit within `number_of_bits` bits.
    if (comptime std.debug.runtime_safety) {
        if (getBits(TargetType, value, 0, (end_bit - start_bit)) != value) @panic("value exceeds bit range");
    }

    // Build (at comptime) a mask with zeros in [start_bit, end_bit) and ones
    // elsewhere, by shifting a ones-mask into place and inverting it.
    const bitmask: TargetType = comptime blk: {
        var bitmask = ~@as(TargetType, 0);
        bitmask <<= (@bitSizeOf(TargetType) - end_bit);
        bitmask >>= (@bitSizeOf(TargetType) - end_bit);
        bitmask >>= start_bit;
        bitmask <<= start_bit;
        break :blk ~bitmask;
    };

    // Clear the target range, then OR in the shifted value.
    return (target & bitmask) | (value << start_bit);
}

/// Reads the bits of `target` starting at `start_bit`, returning them in the
/// low bits of a `TargetType`. `number_of_bits` is validated against the bit
/// width of `TargetType` at compile time; only unsigned integers (or a
/// non-negative `comptime_int` target) are accepted.
pub inline fn getBits(comptime TargetType: type, target: anytype, comptime start_bit: comptime_int, comptime number_of_bits: comptime_int) TargetType {
    // Every precondition is enforced at compile time so the runtime body is
    // a single shift-and-truncate.
    comptime {
        if (number_of_bits == 0) @compileError("non-zero number_of_bits must be provided");

        switch (@typeInfo(TargetType)) {
            .Int => |int_info| {
                if (int_info.signedness != .unsigned) {
                    @compileError("requires an unsigned integer, found " ++ @typeName(TargetType));
                }
                if (start_bit >= @bitSizeOf(TargetType)) {
                    @compileError("start_bit index is out of bounds of the bit field");
                }
                if (start_bit + number_of_bits > @bitSizeOf(TargetType)) {
                    @compileError("start_bit + number_of_bits is out of bounds of the bit field");
                }
            },
            .ComptimeInt => {
                if (target < 0) {
                    @compileError("requires an unsigned integer, found " ++ @typeName(TargetType));
                }
            },
            else => @compileError("requires an unsigned integer, found " ++ @typeName(TargetType)),
        }
    }

    const shifted = target >> start_bit;
    return @as(TargetType, @truncate(shifted));
}

/// In some parts of Bun, we have many different IDs pointing to different things.
/// It's easy for them to get mixed up, so we use this type to make sure we don't.
///
Expand Down Expand Up @@ -186,6 +86,19 @@ pub const Index = packed struct(u32) {
}
};

/// -- original comment from esbuild --
///
/// Files are parsed in parallel for speed. We want to allow each parser to
/// generate symbol IDs that won't conflict with each other. We also want to be
/// able to quickly merge symbol tables from all files into one giant symbol
/// table.
///
/// We can accomplish both goals by giving each symbol ID two parts: a source
/// index that is unique to the parser goroutine, and an inner index that
/// increments as the parser generates new symbol IDs. Then a symbol map can
/// be an array of arrays indexed first by source index, then by inner index.
/// The maps can be merged quickly by creating a single outer array containing
/// all inner arrays from all parsed files.
pub const Ref = packed struct(u64) {
inner_index: Int = 0,

Expand All @@ -198,6 +111,9 @@ pub const Ref = packed struct(u64) {

source_index: Int = 0,

/// Represents a null state without using an extra bit
pub const None = Ref{ .inner_index = 0, .source_index = 0, .tag = .invalid };

/// True when every bit of the packed u64 representation is zero
/// (inner_index == 0, source_index == 0, and a zero tag).
/// NOTE(review): distinct from `isNull`, which checks only `tag == .invalid`;
/// presumably `Ref.None` is also all-zero — confirm `.invalid` encodes as 0.
pub inline fn isEmpty(this: Ref) bool {
    return this.asU64() == 0;
}
Expand All @@ -222,7 +138,7 @@ pub const Ref = packed struct(u64) {
pub fn format(ref: Ref, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
try std.fmt.format(
writer,
"Ref[{d}, {d}, {s}]",
"Ref[inner={d}, src={d}, .{s}]",
.{
ref.sourceIndex(),
ref.innerIndex(),
Expand All @@ -235,9 +151,6 @@ pub const Ref = packed struct(u64) {
return this.tag != .invalid;
}

// 2 bits of padding for whatever is the parent
pub const None = Ref{ .inner_index = 0, .source_index = 0, .tag = .invalid };

pub inline fn sourceIndex(this: Ref) Int {
return this.source_index;
}
Expand All @@ -253,10 +166,7 @@ pub const Ref = packed struct(u64) {
pub fn init(inner_index: Int, source_index: usize, is_source_contents_slice: bool) Ref {
return .{
.inner_index = inner_index,

// if we overflow, we want a panic
.source_index = @as(Int, @intCast(source_index)),

.source_index = @intCast(source_index),
.tag = if (is_source_contents_slice) .source_contents_slice else .allocated_name,
};
}
Expand All @@ -278,9 +188,10 @@ pub const Ref = packed struct(u64) {
return bun.hash(&@as([8]u8, @bitCast(key.asU64())));
}

pub fn eql(ref: Ref, b: Ref) bool {
return asU64(ref) == b.asU64();
/// Two Refs are equal iff their full packed u64 representations match,
/// i.e. inner index, source index, and tag are all identical.
pub fn eql(ref: Ref, other: Ref) bool {
    return ref.asU64() == other.asU64();
}

pub inline fn isNull(self: Ref) bool {
return self.tag == .invalid;
}
Expand Down
64 changes: 0 additions & 64 deletions src/bench/string-handling.zig

This file was deleted.

9 changes: 5 additions & 4 deletions src/bun.js/RuntimeTranspilerCache.zig
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
// ** Update the version number when any breaking changes are made to the cache format or to the JS parser **
// Version 2 -> 3: "Infinity" becomes "1/0".
const expected_version = 3;
/// ** Update the version number when any breaking changes are made to the cache format or to the JS parser **
/// Version 3: "Infinity" becomes "1/0".
/// Version 4: TypeScript enums are properly handled + more constant folding
const expected_version = 4;

const bun = @import("root").bun;
const std = @import("std");
Expand Down Expand Up @@ -203,7 +204,7 @@ pub const RuntimeTranspilerCache = struct {
if (comptime bun.Environment.allow_assert) {
var metadata_stream2 = std.io.fixedBufferStream(metadata_buf[0..Metadata.size]);
var metadata2 = Metadata{};
metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not rountrip encode -> decode successfully: {s}", .{@errorName(err)});
metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not roundtrip encode -> decode successfully: {s}", .{@errorName(err)});
bun.assert(std.meta.eql(metadata, metadata2));
}

Expand Down
Loading

0 comments on commit 688ddbd

Please sign in to comment.