Support import with { type: "json" } and others #16624

Draft · wants to merge 14 commits into base: main
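For context, a minimal sketch of the import-attribute syntax this PR routes through the module loader. The file paths are hypothetical; the attribute names are the ones the old hand-written chain in module_loader.zig matched, now handled generically via bun.options.Loader.fromString:

```ts
// Each `type` attribute picks the Bun loader for the imported file.
import data from "./data.json" with { type: "json" };     // strict JSON
import config from "./config.toml" with { type: "toml" }; // TOML parsed to an object
import readme from "./README.md" with { type: "text" };   // file contents as a string
import db from "./app.db" with { type: "sqlite" };        // SQLite database
```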
2 changes: 2 additions & 0 deletions src/bun.js/api/JSTranspiler.zig
@@ -492,6 +492,8 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
&transpiler.log,
source,
allocator,
.json,
false,
) catch null) orelse break :macros;
transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, &source);
}
4 changes: 4 additions & 0 deletions src/bun.js/bindings/ZigGlobalObject.cpp
@@ -4224,6 +4224,10 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalOb

if (params.type() == ScriptFetchParameters::Type::HostDefined) {
typeAttributeString = params.hostDefinedImportType();
} else if (params.type() == ScriptFetchParameters::Type::JSON) {
typeAttributeString = "json"_s;
} else if (params.type() == ScriptFetchParameters::Type::WebAssembly) {
typeAttributeString = "webassembly"_s;
}
}
}
51 changes: 13 additions & 38 deletions src/bun.js/module_loader.zig
@@ -252,6 +252,7 @@ pub const RuntimeTranspilerStore = struct {
input_specifier: bun.String,
path: Fs.Path,
referrer: bun.String,
loader: bun.options.Loader,
) *anyopaque {
var job: *TranspilerJob = this.store.get();
const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable);
@@ -263,7 +264,7 @@
.non_threadsafe_referrer = referrer,
.vm = vm,
.log = logger.Log.init(bun.default_allocator),
.loader = vm.transpiler.options.loader(owned_path.name.ext),
.loader = loader,
.promise = JSC.Strong.create(JSC.JSValue.fromCell(promise), globalObject),
.poll_ref = .{},
.fetcher = TranspilerJob.Fetcher{
@@ -1507,7 +1508,7 @@ pub const ModuleLoader = struct {
const disable_transpilying = comptime flags.disableTranspiling();

if (comptime disable_transpilying) {
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json)) {
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json or loader == .jsonc)) {
// Don't print "export default <file path>"
return ResolvedSource{
.allocator = null,
@@ -1520,7 +1521,7 @@
}

switch (loader) {
.js, .jsx, .ts, .tsx, .json, .toml, .text => {
.js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .text => {
jsc_vm.transpiled_count += 1;
jsc_vm.transpiler.resetStore();
const hash = bun.Watcher.getHash(path.text);
@@ -1659,7 +1660,7 @@
}

var parse_result: ParseResult = switch (disable_transpilying or
(loader == .json and !path.isJSONCFile())) {
(loader == .json)) {
inline else => |return_file_only| brk: {
const heap_access = if (!disable_transpilying)
jsc_vm.jsc.releaseHeapAccess()
@@ -1738,7 +1739,7 @@
return error.ParseError;
}

if (loader == .json and !path.isJSONCFile()) {
if (loader == .json) {
return ResolvedSource{
.allocator = null,
.source_code = bun.String.createUTF8(parse_result.source.contents),
@@ -1764,7 +1765,7 @@
};
}

if (loader == .json or loader == .toml) {
if (loader == .json or loader == .jsonc or loader == .toml) {
if (parse_result.empty) {
return ResolvedSource{
.allocator = null,
@@ -2293,7 +2294,7 @@

// Deliberately optional.
// The concurrent one only handles javascript-like loaders right now.
var loader: ?options.Loader = jsc_vm.transpiler.options.loaders.get(path.name.ext);
var loader: ?options.Loader = path.loader(&jsc_vm.transpiler.options.loaders);

if (jsc_vm.module_loader.eval_source) |eval_source| {
if (strings.endsWithComptime(specifier, bun.pathLiteral("/[eval]"))) {
@@ -2320,7 +2321,7 @@
path = current_path;
}

loader = jsc_vm.transpiler.options.loaders.get(current_path.name.ext) orelse .tsx;
loader = current_path.loader(&jsc_vm.transpiler.options.loaders) orelse .tsx;
} else {
loader = .tsx;
}
@@ -2338,36 +2339,9 @@
}
}

if (type_attribute) |attribute| {
if (attribute.eqlComptime("sqlite")) {
loader = .sqlite;
} else if (attribute.eqlComptime("text")) {
loader = .text;
} else if (attribute.eqlComptime("json")) {
loader = .json;
} else if (attribute.eqlComptime("toml")) {
loader = .toml;
} else if (attribute.eqlComptime("file")) {
loader = .file;
} else if (attribute.eqlComptime("js")) {
loader = .js;
} else if (attribute.eqlComptime("jsx")) {
loader = .jsx;
} else if (attribute.eqlComptime("ts")) {
loader = .ts;
} else if (attribute.eqlComptime("tsx")) {
loader = .tsx;
} else if (attribute.eqlComptime("html")) {
loader = .html;
}
}

// If we were going to choose file loader, see if it's a bun.lock
if (loader == null) {
if (strings.eqlComptime(path.name.filename, "bun.lock")) {
loader = .json;
}
}
if (type_attribute) |attribute| if (attribute.asUTF8()) |attr_utf8| if (bun.options.Loader.fromString(attr_utf8)) |attr_loader| {
loader = attr_loader;
};

// We only run the transpiler concurrently when we can.
// Today, that's:
@@ -2389,6 +2363,7 @@
specifier_ptr.dupeRef(),
path,
referrer.dupeRef(),
concurrent_loader,
);
}
}
37 changes: 11 additions & 26 deletions src/bundler/bundle_v2.zig
@@ -824,8 +824,8 @@ pub const BundleV2 = struct {
path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory();
const loader: Loader = (brk: {
if (import_record.importer_source_index) |importer| {
var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index];
if (record.loader()) |out_loader| {
const record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index];
if (record.loader) |out_loader| {
break :brk out_loader;
}
}
@@ -1329,10 +1329,7 @@
this.graph.input_files.append(bun.default_allocator, .{
.source = source,
.loader = loader,
.side_effects = switch (loader) {
.text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
else => _resolver.SideEffects.has_side_effects,
},
.side_effects = loader.sideEffects(),
}) catch bun.outOfMemory();
var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory();
task.* = ParseTask.init(resolve_result, source_index, this);
@@ -1371,10 +1368,7 @@
this.graph.input_files.append(bun.default_allocator, .{
.source = source,
.loader = loader,
.side_effects = switch (loader) {
.text, .json, .toml, .file => .no_side_effects__pure_data,
else => .has_side_effects,
},
.side_effects = loader.sideEffects(),
}) catch bun.outOfMemory();
var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory();
task.* = .{
@@ -2666,12 +2660,12 @@
}

// By default, we treat .sqlite files as external.
if (import_record.tag == .with_type_sqlite) {
if (import_record.loader != null and import_record.loader.? == .sqlite) {
import_record.is_external_without_side_effects = true;
continue;
}

if (import_record.tag == .with_type_sqlite_embedded) {
if (import_record.loader != null and import_record.loader.? == .sqlite_embedded) {
import_record.is_external_without_side_effects = true;
}

@@ -2879,7 +2873,7 @@

// Figure out the loader.
{
if (import_record.tag.loader()) |loader| {
if (import_record.loader) |loader| {
resolve_task.loader = loader;
}

@@ -3767,10 +3761,10 @@ pub const ParseTask = struct {
),
};
},
.json => {
.json, .jsonc => |v| {
const trace = tracer(@src(), "ParseJSON");
defer trace.end();
const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator, false)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
const root = (try resolver.caches.json.parseJSON(log, source, allocator, if (v == .jsonc) .jsonc else .json, true)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?);
},
.toml => {
@@ -7507,22 +7501,13 @@ pub const LinkerContext = struct {
const source = &input_files[id];
const loader = loaders[record.source_index.get()];
switch (loader) {
.jsx, .js, .ts, .tsx, .napi, .sqlite, .json, .html => {
.jsx, .js, .ts, .tsx, .napi, .sqlite, .sqlite_embedded, .json, .jsonc, .html => {
this.log.addErrorFmt(
source,
record.range.loc,
this.allocator,
"Cannot import a \".{s}\" file into a CSS file",
.{@tagName(loader)},
) catch bun.outOfMemory();
},
.sqlite_embedded => {
this.log.addErrorFmt(
source,
record.range.loc,
this.allocator,
"Cannot import a \"sqlite_embedded\" file into a CSS file",
.{},
.{if (loader == .sqlite_embedded) "sqlite" else @tagName(loader)},
) catch bun.outOfMemory();
},
.css, .file, .toml, .wasm, .base64, .dataurl, .text, .bunsh => {},
8 changes: 4 additions & 4 deletions src/cache.zig
@@ -316,15 +316,15 @@ pub const Json = struct {
break :handler null;
};
}
pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, mode: enum { json, jsonc }, comptime force_utf8: bool) anyerror!?js_ast.Expr {
// tsconfig.* and jsconfig.* files are JSON files, but they are not valid JSON files.
// They are JSON files with comments and trailing commas.
// Sometimes tooling expects this to work.
if (source.path.isJSONCFile()) {
return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true);
if (mode == .jsonc) {
return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8);
}

return try parse(cache, log, source, allocator, json_parser.parse, false);
return try parse(cache, log, source, allocator, json_parser.parse, force_utf8);
}

pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr {
37 changes: 18 additions & 19 deletions src/fs.zig
@@ -1696,7 +1696,24 @@ pub const Path = struct {

const ext = this.name.ext;

return loaders.get(ext) orelse bun.options.Loader.fromString(ext);
const result = loaders.get(ext) orelse bun.options.Loader.fromString(ext);
if (result == null or result == .json) {
const str = this.name.filename;
if (strings.eqlComptime(str, "package.json") or strings.eqlComptime(str, "bun.lock")) {
return .jsonc;
}

if (strings.hasSuffixComptime(str, ".jsonc")) {
return .jsonc;
}

if (strings.hasPrefixComptime(str, "tsconfig.") or strings.hasPrefixComptime(str, "jsconfig.")) {
if (strings.hasSuffixComptime(str, ".json")) {
return .jsonc;
}
}
}
return result;
}

pub fn isDataURL(this: *const Path) bool {
Expand All @@ -1711,24 +1728,6 @@ pub const Path = struct {
return strings.eqlComptime(this.namespace, "macro");
}

pub fn isJSONCFile(this: *const Path) bool {
const str = this.name.filename;

if (strings.eqlComptime(str, "package.json") or strings.eqlComptime(str, "bun.lock")) {
return true;
}

if (strings.hasSuffixComptime(str, ".jsonc")) {
return true;
}

if (strings.hasPrefixComptime(str, "tsconfig.") or strings.hasPrefixComptime(str, "jsconfig.")) {
return strings.hasSuffixComptime(str, ".json");
}

return false;
}

pub const PackageRelative = struct {
path: string,
name: string,
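A sketch of the net effect of the fs.zig loader change above (file names hypothetical): package.json, bun.lock, *.jsonc, and tsconfig.*/jsconfig.* .json files now resolve to the .jsonc loader directly, rather than being special-cased through the removed isJSONCFile helper:

```ts
// These now resolve to the .jsonc loader, so comments and
// trailing commas are accepted when they are imported:
import pkg from "./package.json";
import tsconfig from "./tsconfig.json";
import settings from "./settings.jsonc";

// A plain .json file keeps the strict .json loader.
import data from "./data.json";
```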
40 changes: 1 addition & 39 deletions src/import_record.zig
@@ -102,6 +102,7 @@ pub const ImportRecord = struct {
path: fs.Path,
kind: ImportKind,
tag: Tag = .none,
loader: ?bun.options.Loader = null,

source_index: Index = Index.invalid,

@@ -167,10 +168,6 @@

pub const List = bun.BabyList(ImportRecord);

pub fn loader(this: *const ImportRecord) ?bun.options.Loader {
return this.tag.loader();
}

pub const Tag = enum {
/// A normal import to a user's source file
none,
@@ -189,43 +186,8 @@
/// crossover to the SSR graph. See bake.Framework.ServerComponents.separate_ssr_graph
bake_resolve_to_ssr_graph,

with_type_sqlite,
with_type_sqlite_embedded,
with_type_text,
with_type_json,
with_type_toml,
with_type_file,

tailwind,

pub fn loader(this: Tag) ?bun.options.Loader {
return switch (this) {
.with_type_sqlite => .sqlite,
.with_type_sqlite_embedded => .sqlite_embedded,
.with_type_text => .text,
.with_type_json => .json,
.with_type_toml => .toml,
.with_type_file => .file,
else => null,
};
}

pub fn onlySupportsDefaultImports(this: Tag) bool {
return switch (this) {
.with_type_file, .with_type_text => true,
else => false,
};
}

pub fn isSQLite(this: Tag) bool {
return switch (this) {
.with_type_sqlite,
.with_type_sqlite_embedded,
=> true,
else => false,
};
}

pub inline fn isRuntime(this: Tag) bool {
return this == .runtime;
}