Don't error on comments & trailing commas in package.json (#10287)
* Don't error on comments & trailing commas in package.json

* Fix windows tests

* Fixup

* Update lockfile.zig

* Woopsie

* Woopsie

* Fix workspace path stuff on Windows

* Always decode

* Always decode

* Revert some things

* Update lockfile.zig
Jarred-Sumner authored Apr 25, 2024
1 parent d966fe6 commit 49fa21f
Showing 15 changed files with 260 additions and 90 deletions.
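In practice, the change means a package.json like the one below, which previously aborted installs with a parse error, now parses cleanly. A minimal usage sketch (hypothetical, not part of the diff; assumes `log` and `allocator` are in scope and uses the `ParsePackageJSONUTF8` entry point this commit adds):

const contents =
    \\{
    \\  // comments are now tolerated in package.json
    \\  "name": "demo",
    \\  "dependencies": {
    \\    "left-pad": "^1.3.0",
    \\  },
    \\}
;
const source = logger.Source.initPathString("package.json", contents);
const expr = try json_parser.ParsePackageJSONUTF8(&source, log, allocator);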
2 changes: 1 addition & 1 deletion src/bundler/bundle_v2.zig
@@ -2580,7 +2580,7 @@ pub const ParseTask = struct {
.json => {
const trace = tracer(@src(), "ParseJSON");
defer trace.end();
const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
},
.toml => {
4 changes: 4 additions & 0 deletions src/cache.zig
@@ -314,6 +314,10 @@ pub const Json = struct {
return try parse(cache, log, source, allocator, json_parser.ParseJSON);
}

pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
}

pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
}
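Note the implementation trick here: rather than introducing a new parser, parsePackageJSON routes package.json through the same json_parser.ParseTSConfig entry point that tsconfig.json already uses, because that parser is already lenient. Roughly the configuration in play (a sketch inferred from the JSONLikeParser options that appear later in this diff):

const LenientJSON = JSONLikeParser(.{
    .is_json = true,               // still JSON semantics, not JavaScript
    .allow_comments = true,        // tolerate `// ...` and `/* ... */`
    .allow_trailing_commas = true, // tolerate `{ "a": 1, }`
});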
2 changes: 1 addition & 1 deletion src/cli/bunx_command.zig
@@ -81,7 +81,7 @@ pub const BunxCommand = struct {
bun.JSAst.Expr.Data.Store.create(default_allocator);
bun.JSAst.Stmt.Data.Store.create(default_allocator);

const expr = try bun.JSON.ParseJSONUTF8(&source, bundler.log, bundler.allocator);
const expr = try bun.JSON.ParsePackageJSONUTF8(&source, bundler.log, bundler.allocator);

// choose the first package that fits
if (expr.get("bin")) |bin_expr| {
2 changes: 1 addition & 1 deletion src/cli/filter_arg.zig
@@ -66,7 +66,7 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge
};
defer allocator.free(json_source.contents);

const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator);
const json = try json_parser.ParsePackageJSONUTF8(&json_source, log, allocator);

const prop = json.asProperty("workspaces") orelse continue;

2 changes: 1 addition & 1 deletion src/cli/init_command.zig
@@ -12,7 +12,7 @@ const std = @import("std");
const open = @import("../open.zig");
const CLI = @import("../cli.zig");
const Fs = @import("../fs.zig");
const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8;
const ParseJSON = @import("../json_parser.zig").ParsePackageJSONUTF8;
const js_parser = bun.js_parser;
const js_ast = bun.JSAst;
const linker = @import("../linker.zig");
8 changes: 4 additions & 4 deletions src/install/install.zig
@@ -5080,7 +5080,7 @@ pub const PackageManager = struct {
data.json_buf,
);
initializeStore();
const json = json_parser.ParseJSONUTF8(
const json = json_parser.ParsePackageJSONUTF8(
&package_json_source,
manager.log,
manager.allocator,
@@ -7055,7 +7055,7 @@ pub const PackageManager = struct {
const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf);
const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]);
initializeStore();
const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
const json = try json_parser.ParsePackageJSONUTF8(&json_source, ctx.log, ctx.allocator);
if (json.asProperty("workspaces")) |prop| {
const json_array = switch (prop.expr.data) {
.e_array => |arr| arr,
@@ -8349,7 +8349,7 @@ pub const PackageManager = struct {
current_package_json_buf[current_package_json_contents_len - 1] == '\n';

initializeStore();
var current_package_json = json_parser.ParseJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
var current_package_json = json_parser.ParsePackageJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
@@ -8500,7 +8500,7 @@ pub const PackageManager = struct {

// Now, we _re_ parse our in-memory edited package.json
// so we can commit the version we changed from the lockfile
current_package_json = json_parser.ParseJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
current_package_json = json_parser.ParsePackageJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
Output.prettyErrorln("<red>error<r><d>:<r> package.json failed to parse due to error {s}", .{@errorName(err)});
Global.exit(1);
return;
21 changes: 11 additions & 10 deletions src/install/lockfile.zig
@@ -2675,7 +2675,7 @@ pub const Package = extern struct {
};

initializeStore();
break :brk try json_parser.ParseJSONUTF8(
break :brk try json_parser.ParsePackageJSONUTF8(
&json_src,
log,
allocator,
@@ -3489,7 +3489,7 @@ pub const Package = extern struct {
comptime features: Features,
) !void {
initializeStore();
const json = json_parser.ParseJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| {
const json = json_parser.ParsePackageJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| {
log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
@@ -3837,13 +3837,13 @@ pub const Package = extern struct {
workspace_allocator: std.mem.Allocator,
dir: std.fs.Dir,
path: []const u8,
path_buf: *[bun.MAX_PATH_BYTES]u8,
path_buf: *bun.PathBuffer,
name_to_copy: *[1024]u8,
log: *logger.Log,
) !WorkspaceEntry {
const path_to_use = if (path.len == 0) "package.json" else brk: {
const paths = [_]string{ path, "package.json" };
break :brk bun.path.joinStringBuf(path_buf, &paths, .posix);
break :brk bun.path.joinStringBuf(path_buf, &paths, .auto);
};

// TODO: windows
@@ -3869,7 +3869,7 @@
.name = name_to_copy[0..workspace_json.found_name.len],
.name_loc = workspace_json.name_loc,
};
debug("processWorkspaceName({s}) = {s}", .{ path_to_use, entry.name });
debug("processWorkspaceName({s}) = {s}", .{ path, entry.name });
if (workspace_json.has_found_version) {
entry.version = try allocator.dupe(u8, workspace_json.found_version);
}
@@ -3896,8 +3896,9 @@

var asterisked_workspace_paths = std.ArrayList(string).init(allocator);
defer asterisked_workspace_paths.deinit();
const filepath_buf = allocator.create([bun.MAX_PATH_BYTES]u8) catch unreachable;
defer allocator.destroy(filepath_buf);
const filepath_bufOS = allocator.create(bun.PathBuffer) catch unreachable;
const filepath_buf = std.mem.asBytes(filepath_bufOS);
defer allocator.destroy(filepath_bufOS);

for (arr.slice()) |item| {
defer fallback.fixed_buffer_allocator.reset();
@@ -3947,12 +3948,12 @@ pub const Package = extern struct {
workspace_allocator,
std.fs.cwd(),
input_path,
filepath_buf,
filepath_bufOS,
workspace_name_buf,
log,
) catch |err| {
switch (err) {
error.FileNotFound => {
error.EISNOTDIR, error.EISDIR, error.EACCESS, error.EPERM, error.ENOENT, error.FileNotFound => {
log.addErrorFmt(
source,
item.loc,
@@ -4083,7 +4084,7 @@ pub const Package = extern struct {
workspace_allocator,
dir_fd.asDir(),
"",
filepath_buf,
filepath_bufOS,
workspace_name_buf,
log,
) catch |err| {
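Two Windows fixes land in this file: workspace paths are now joined with the `.auto` platform, which resolves to the host's native separator instead of hard-coded posix, and the processWorkspaceName catch maps the full set of file-open errnos to the same missing-package.json diagnostic rather than only error.FileNotFound. A sketch of the separator change (assumed outputs; not part of the diff):

var buf: bun.PathBuffer = undefined;
const parts = [_]string{ "packages/app", "package.json" };
// posix hosts: "packages/app/package.json"
// Windows:     "packages\app\package.json"
const joined = bun.path.joinStringBuf(&buf, &parts, .auto);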
56 changes: 50 additions & 6 deletions src/json_parser.zig
@@ -378,6 +378,7 @@ pub const PackageJSONVersionChecker = struct {
.is_json = true,
.json_warn_duplicate_keys = false,
.allow_trailing_commas = true,
.allow_comments = true,
};

pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
@@ -732,11 +733,12 @@ pub fn ParseJSON(
return try parser.parseExpr(false, false);
}

/// Parse JSON
/// Parse Package JSON
/// Allow trailing commas & comments.
/// This eagerly transcodes UTF-16 strings into UTF-8 strings
/// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
/// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
pub fn ParseJSONUTF8(
pub fn ParsePackageJSONUTF8(
source: *const logger.Source,
log: *logger.Log,
allocator: std.mem.Allocator,
@@ -761,18 +763,24 @@
else => {},
}

var parser = try JSONParser.init(allocator, source.*, log);
var parser = try JSONLikeParser(.{
.is_json = true,
.always_decode_escape_sequences = false,
.allow_comments = true,
.allow_trailing_commas = true,
}).init(allocator, source.*, log);
bun.assert(parser.source().contents.len > 0);

return try parser.parseExpr(false, true);
}

pub fn ParseJSONUTF8AlwaysDecode(
pub fn ParsePackageJSONUTF8AlwaysDecode(
source: *const logger.Source,
log: *logger.Log,
allocator: std.mem.Allocator,
) !Expr {
const len = source.contents.len;

switch (len) {
// This is to be consistent with how disabled JS files are handled
0 => {
@@ -794,11 +802,47 @@
var parser = try JSONLikeParser(.{
.is_json = true,
.always_decode_escape_sequences = true,
.allow_comments = true,
.allow_trailing_commas = true,
}).init(allocator, source.*, log);
if (comptime Environment.allow_assert) {
bun.assert(parser.source().contents.len > 0);
bun.assert(parser.source().contents.len > 0);

return try parser.parseExpr(false, true);
}

/// Parse JSON
/// This eagerly transcodes UTF-16 strings into UTF-8 strings
/// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
/// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
pub fn ParseJSONUTF8(
source: *const logger.Source,
log: *logger.Log,
allocator: std.mem.Allocator,
) !Expr {
const len = source.contents.len;

switch (len) {
// This is to be consistent with how disabled JS files are handled
0 => {
return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
},
// This is a fast pass I guess
2 => {
if (strings.eqlComptime(source.contents[0..1], "\"\"") or strings.eqlComptime(source.contents[0..1], "''")) {
return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data };
} else if (strings.eqlComptime(source.contents[0..1], "{}")) {
return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
} else if (strings.eqlComptime(source.contents[0..1], "[]")) {
return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data };
}
},
else => {},
}

var parser = try JSONParser.init(allocator, source.*, log);
bun.assert(parser.source().contents.len > 0);

return try parser.parseExpr(false, true);
}

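The file now exposes both entry points side by side: ParsePackageJSONUTF8 is lenient, while the re-added ParseJSONUTF8 stays strict for ordinary .json sources. A sketch of the intended contrast (hypothetical; assumes `jsonc_source` holds JSON containing comments and that the strict parser fails with an error):

// lenient: package.json may carry comments and trailing commas
_ = try ParsePackageJSONUTF8(&jsonc_source, log, allocator);

// strict: the same input remains a syntax error for plain JSON callers
if (ParseJSONUTF8(&jsonc_source, log, allocator)) |_| {
    unreachable; // should not parse
} else |err| {
    std.debug.print("strict parser rejected it: {s}\n", .{@errorName(err)});
}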
2 changes: 1 addition & 1 deletion src/resolver/package_json.zig
@@ -617,7 +617,7 @@ pub const PackageJSON = struct {
var json_source = logger.Source.initPathString(key_path.text, entry.contents);
json_source.path.pretty = r.prettyPath(json_source.path);

const json: js_ast.Expr = (r.caches.json.parseJSON(r.log, json_source, allocator) catch |err| {
const json: js_ast.Expr = (r.caches.json.parsePackageJSON(r.log, json_source, allocator) catch |err| {
if (Environment.isDebug) {
Output.printError("{s}: JSON parse error: {s}", .{ package_json_path, @errorName(err) });
}
49 changes: 37 additions & 12 deletions src/resolver/resolve_path.zig
@@ -1215,11 +1215,18 @@ pub fn joinZBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]co
return buf[start_offset..][0..joined.len :0];
}
pub fn joinStringBuf(buf: []u8, parts: anytype, comptime _platform: Platform) []const u8 {
return joinStringBufT(u8, buf, parts, _platform);
}
pub fn joinStringBufW(buf: []u16, parts: anytype, comptime _platform: Platform) []const u16 {
return joinStringBufT(u16, buf, parts, _platform);
}

pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime _platform: Platform) []const T {
const platform = comptime _platform.resolve();

var written: usize = 0;
var temp_buf_: [4096]u8 = undefined;
var temp_buf: []u8 = &temp_buf_;
var temp_buf_: [4096]T = undefined;
var temp_buf: []T = &temp_buf_;
var free_temp_buf = false;
defer {
if (free_temp_buf) {
@@ -1234,7 +1241,7 @@ pub fn joinStringBuf(buf: []u8, parts: anytype, comptime _platform: Platform) []
}

if (count * 2 > temp_buf.len) {
temp_buf = bun.default_allocator.alloc(u8, count * 2) catch @panic("Out of memory");
temp_buf = bun.default_allocator.alloc(T, count * 2) catch @panic("Out of memory");
free_temp_buf = true;
}

@@ -1248,16 +1255,22 @@
written += 1;
}

bun.copy(u8, temp_buf[written..], part);
written += part.len;
const Element = std.meta.Elem(@TypeOf(part));
if (comptime T == u16 and Element == u8) {
const wrote = bun.strings.convertUTF8toUTF16InBuffer(temp_buf[written..], part);
written += wrote.len;
} else {
bun.copy(T, temp_buf[written..], part);
written += part.len;
}
}

if (written == 0) {
buf[0] = '.';
return buf[0..1];
}

return normalizeStringNode(temp_buf[0..written], buf, platform);
return normalizeStringNodeT(T, temp_buf[0..written], buf, platform);
}

pub fn joinAbsStringBuf(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) []const u8 {
@@ -1635,26 +1648,37 @@ pub fn normalizeStringNode(
buf: []u8,
comptime platform: Platform,
) []u8 {
return normalizeStringNodeT(u8, str, buf, platform);
}

pub fn normalizeStringNodeT(
comptime T: type,
str: []const T,
buf: []T,
comptime platform: Platform,
) []const T {
if (str.len == 0) {
buf[0] = '.';
return buf[0..1];
}

const is_absolute = platform.isAbsolute(str);
const trailing_separator = platform.isSeparator(str[str.len - 1]);
const is_absolute = platform.isAbsoluteT(T, str);
const trailing_separator = platform.isSeparatorT(T, str[str.len - 1]);

// `normalizeStringGeneric` handles absolute path cases for windows
// we should not prefix with /
var buf_ = if (platform == .windows) buf else buf[1..];

var out = if (!is_absolute) normalizeStringGeneric(
var out = if (!is_absolute) normalizeStringGenericT(
T,
str,
buf_,
true,
comptime platform.resolve().separator(),
comptime platform.getSeparatorFuncT(),
false,
) else normalizeStringGeneric(
) else normalizeStringGenericT(
T,
str,
buf_,
false,
@@ -1670,7 +1694,8 @@
}

if (trailing_separator) {
buf[0..2].* = platform.trailingSeparator();
const sep = platform.trailingSeparator();
buf[0..2].* = .{ sep[0], sep[1] };
return buf[0..2];
}

@@ -1679,7 +1704,7 @@
}

if (trailing_separator) {
if (!platform.isSeparator(out[out.len - 1])) {
if (!platform.isSeparatorT(T, out[out.len - 1])) {
buf_[out.len] = platform.separator();
out = buf_[0 .. out.len + 1];
}
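The join and normalize helpers are now thin wrappers over comptime-generic joinStringBufT / normalizeStringNodeT, so the same path logic can produce UTF-16 output for Windows wide-path APIs, transcoding any UTF-8 parts in-buffer when T == u16. A usage sketch of the new W variant (hypothetical buffer and parts):

var wbuf: [260]u16 = undefined;
const parts = [_][]const u8{ "C:\\work", "app", "package.json" };
// `wide` is []const u16, suitable for Win32 wide-char path APIs
const wide = bun.path.joinStringBufW(&wbuf, &parts, .windows);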
2 changes: 1 addition & 1 deletion src/resolver/resolver.zig
Original file line number Diff line number Diff line change
@@ -731,7 +731,7 @@ pub const Resolver = struct {
// support passing a package.json or path to a package
const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON;

const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;
const json = (try r.caches.json.parsePackageJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;

pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference);
const dir = pkg.source.path.sourceDir();
