>/>;
- ```
-
- I think I originally didn't implement it even though it's part of the [JSX specification](https://facebook.github.io/jsx/) because it previously didn't work in TypeScript (and potentially also in Babel?). However, support for it was [silently added in TypeScript 4.8](https://github.com/microsoft/TypeScript/pull/47994) without me noticing and Babel has also since fixed their [bugs regarding this feature](https://github.com/babel/babel/pull/6006). So I'm adding it to esbuild too now that I know it's widely supported.
-
- Keep in mind that there is some ongoing discussion about [removing this feature from JSX](https://github.com/facebook/jsx/issues/53). I agree that the syntax seems out of place (it does away with the elegance of "JSX is basically just XML with `{...}` escapes" for something arguably harder to read, which doesn't seem like a good trade-off), but it's in the specification and TypeScript and Babel both implement it so I'm going to have esbuild implement it too. However, I reserve the right to remove it from esbuild if it's ever removed from the specification in the future. So use it with caution.
-
-* Fix a bug with TypeScript type parsing ([#3574](https://github.com/evanw/esbuild/issues/3574))
-
- This release fixes a bug with esbuild's TypeScript parser where a conditional type containing a union type that ends with an infer type that ends with a constraint could fail to parse. This was caused by the "don't parse a conditional type" flag not getting passed through the union type parser. Here's an example of valid TypeScript code that previously failed to parse correctly:
-
- ```ts
- type InferUnion
= T extends { a: infer U extends number } | infer U extends number ? U : never
- ```
+All esbuild versions published in the year 2024 (versions 0.19.12 through 0.24.2) can be found in [CHANGELOG-2024.md](./CHANGELOG-2024.md).
## 2023
-All esbuild versions published in the year 2022 (versions 0.16.13 through 0.19.11) can be found in [CHANGELOG-2023.md](./CHANGELOG-2023.md).
+All esbuild versions published in the year 2023 (versions 0.16.13 through 0.19.11) can be found in [CHANGELOG-2023.md](./CHANGELOG-2023.md).
## 2022
diff --git a/Makefile b/Makefile
index 7647328651a..2b83898ac27 100644
--- a/Makefile
+++ b/Makefile
@@ -20,7 +20,7 @@ test-all:
@$(MAKE) --no-print-directory -j6 test-common test-deno ts-type-tests test-wasm-node test-wasm-browser lib-typecheck test-yarnpnp
check-go-version:
- @go version | grep ' go1\.22\.5 ' || (echo 'Please install Go version 1.22.5' && false)
+ @go version | grep ' go1\.23\.5 ' || (echo 'Please install Go version 1.23.5' && false)
# Note: Don't add "-race" here by default. The Go race detector is currently
# only supported on the following configurations:
@@ -69,6 +69,7 @@ test-deno: esbuild platform-deno
@echo '✅ deno tests passed' # I couldn't find a Deno API for telling when tests have failed, so I'm doing this here instead
deno eval 'import { transform, stop } from "file://$(shell pwd)/deno/mod.js"; console.log((await transform("1+2")).code); stop()' | grep "1 + 2;"
deno eval 'import { transform, stop } from "file://$(shell pwd)/deno/wasm.js"; console.log((await transform("1+2")).code); stop()' | grep "1 + 2;"
+ deno run -A './deno/mod.js' # See: https://github.com/evanw/esbuild/pull/3917
test-deno-windows: esbuild platform-deno
ESBUILD_BINARY_PATH=./esbuild.exe deno test --allow-run --allow-env --allow-net --allow-read --allow-write --no-check scripts/deno-tests.js
@@ -302,6 +303,7 @@ platform-all:
platform-linux-riscv64 \
platform-linux-s390x \
platform-linux-x64 \
+ platform-netbsd-arm64 \
platform-netbsd-x64 \
platform-neutral \
platform-openbsd-arm64 \
@@ -360,6 +362,9 @@ platform-freebsd-x64:
platform-freebsd-arm64:
@$(MAKE) --no-print-directory GOOS=freebsd GOARCH=arm64 NPMDIR=npm/@esbuild/freebsd-arm64 platform-unixlike
+platform-netbsd-arm64:
+ @$(MAKE) --no-print-directory GOOS=netbsd GOARCH=arm64 NPMDIR=npm/@esbuild/netbsd-arm64 platform-unixlike
+
platform-netbsd-x64:
@$(MAKE) --no-print-directory GOOS=netbsd GOARCH=amd64 NPMDIR=npm/@esbuild/netbsd-x64 platform-unixlike
@@ -441,22 +446,26 @@ publish-all: check-go-version
@echo Enter one-time password:
@read OTP && OTP="$$OTP" $(MAKE) --no-print-directory -j4 \
- publish-freebsd-x64 \
publish-freebsd-arm64 \
+ publish-freebsd-x64 \
publish-openbsd-arm64 \
- publish-openbsd-x64 \
+ publish-openbsd-x64
+
+ @echo Enter one-time password:
+ @read OTP && OTP="$$OTP" $(MAKE) --no-print-directory -j4 \
+ publish-darwin-arm64 \
+ publish-darwin-x64 \
+ publish-netbsd-arm64 \
publish-netbsd-x64
@echo Enter one-time password:
@read OTP && OTP="$$OTP" $(MAKE) --no-print-directory -j4 \
publish-android-x64 \
publish-android-arm \
- publish-android-arm64 \
- publish-darwin-x64
+ publish-android-arm64
@echo Enter one-time password:
@read OTP && OTP="$$OTP" $(MAKE) --no-print-directory -j4 \
- publish-darwin-arm64 \
publish-linux-x64 \
publish-linux-ia32 \
publish-linux-arm
@@ -521,6 +530,9 @@ publish-freebsd-x64: platform-freebsd-x64
publish-freebsd-arm64: platform-freebsd-arm64
test -n "$(OTP)" && cd npm/@esbuild/freebsd-arm64 && npm publish --otp="$(OTP)"
+publish-netbsd-arm64: platform-netbsd-arm64
+ test -n "$(OTP)" && cd npm/@esbuild/netbsd-arm64 && npm publish --otp="$(OTP)"
+
publish-netbsd-x64: platform-netbsd-x64
test -n "$(OTP)" && cd npm/@esbuild/netbsd-x64 && npm publish --otp="$(OTP)"
@@ -618,6 +630,7 @@ validate-builds:
@$(MAKE) --no-print-directory TARGET=platform-linux-riscv64 SCOPE=@esbuild/ PACKAGE=linux-riscv64 SUBPATH=bin/esbuild validate-build
@$(MAKE) --no-print-directory TARGET=platform-linux-s390x SCOPE=@esbuild/ PACKAGE=linux-s390x SUBPATH=bin/esbuild validate-build
@$(MAKE) --no-print-directory TARGET=platform-linux-x64 SCOPE=@esbuild/ PACKAGE=linux-x64 SUBPATH=bin/esbuild validate-build
+ @$(MAKE) --no-print-directory TARGET=platform-netbsd-arm64 SCOPE=@esbuild/ PACKAGE=netbsd-arm64 SUBPATH=bin/esbuild validate-build
@$(MAKE) --no-print-directory TARGET=platform-netbsd-x64 SCOPE=@esbuild/ PACKAGE=netbsd-x64 SUBPATH=bin/esbuild validate-build
@$(MAKE) --no-print-directory TARGET=platform-openbsd-arm64 SCOPE=@esbuild/ PACKAGE=openbsd-arm64 SUBPATH=bin/esbuild validate-build
@$(MAKE) --no-print-directory TARGET=platform-openbsd-x64 SCOPE=@esbuild/ PACKAGE=openbsd-x64 SUBPATH=bin/esbuild validate-build
@@ -654,6 +667,7 @@ clean:
rm -rf npm/@esbuild/linux-riscv64/bin
rm -rf npm/@esbuild/linux-s390x/bin
rm -rf npm/@esbuild/linux-x64/bin
+ rm -rf npm/@esbuild/netbsd-arm64/bin
rm -rf npm/@esbuild/netbsd-x64/bin
rm -rf npm/@esbuild/openbsd-arm64/bin
rm -rf npm/@esbuild/openbsd-x64/bin
diff --git a/cmd/esbuild/service.go b/cmd/esbuild/service.go
index 63bf38df758..f705ba8cbc9 100644
--- a/cmd/esbuild/service.go
+++ b/cmd/esbuild/service.go
@@ -372,7 +372,13 @@ func (service *serviceType) handleIncomingPacket(bytes []byte) {
options.Host = value.(string)
}
if value, ok := request["port"]; ok {
- options.Port = uint16(value.(int))
+ if value == 0 {
+ // 0 is the default value in Go, which we interpret as "try to
+ // pick port 8000". So Go uses -1 as the sentinel value instead.
+ options.Port = -1
+ } else {
+ options.Port = value.(int)
+ }
}
if value, ok := request["servedir"]; ok {
options.Servedir = value.(string)
@@ -418,11 +424,15 @@ func (service *serviceType) handleIncomingPacket(bytes []byte) {
if result, err := ctx.Serve(options); err != nil {
service.sendPacket(encodeErrorPacket(p.id, err))
} else {
+ hosts := make([]interface{}, len(result.Hosts))
+ for i, host := range result.Hosts {
+ hosts[i] = host
+ }
service.sendPacket(encodePacket(packet{
id: p.id,
value: map[string]interface{}{
- "port": int(result.Port),
- "host": result.Host,
+ "port": int(result.Port),
+ "hosts": hosts,
},
}))
}
diff --git a/cmd/esbuild/version.go b/cmd/esbuild/version.go
index 9a054852575..6957ac8d5ec 100644
--- a/cmd/esbuild/version.go
+++ b/cmd/esbuild/version.go
@@ -1,3 +1,3 @@
package main
-const esbuildVersion = "0.23.1"
+const esbuildVersion = "0.25.0"
diff --git a/compat-table/package-lock.json b/compat-table/package-lock.json
index 15ed1576a9e..3d6efd20173 100644
--- a/compat-table/package-lock.json
+++ b/compat-table/package-lock.json
@@ -5,16 +5,17 @@
"packages": {
"": {
"dependencies": {
- "@mdn/browser-compat-data": "5.5.34",
+ "@mdn/browser-compat-data": "5.6.25",
"@types/caniuse-lite": "1.0.1",
"@types/node": "20.3.2",
- "caniuse-lite": "1.0.30001636"
+ "caniuse-lite": "1.0.30001690"
}
},
"node_modules/@mdn/browser-compat-data": {
- "version": "5.5.34",
- "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.5.34.tgz",
- "integrity": "sha512-e8k7+8r3jiJuP7FMH6AL1OnmfQqLyABhTM+NmRDvFeAbMgtFcNQLHpmT7uza5cBnxI01+CAU3aSsIgcKGRdEBQ=="
+ "version": "5.6.25",
+ "resolved": "https://registry.npmjs.org/@mdn/browser-compat-data/-/browser-compat-data-5.6.25.tgz",
+ "integrity": "sha512-fvisUCC7JKWBmtpmnCMQk66AeHFUWNaKTGL7Z2NRDOKJ3kHMEnN23lmrurheWWXZ2sZ1YhpECmqKleDFW1OQeQ==",
+ "license": "CC0-1.0"
},
"node_modules/@types/caniuse-lite": {
"version": "1.0.1",
@@ -27,9 +28,9 @@
"integrity": "sha512-vOBLVQeCQfIcF/2Y7eKFTqrMnizK5lRNQ7ykML/5RuwVXVWxYkgwS7xbt4B6fKCUPgbSL5FSsjHQpaGQP/dQmw=="
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001636",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001636.tgz",
- "integrity": "sha512-bMg2vmr8XBsbL6Lr0UHXy/21m84FTxDLWn2FSqMd5PrlbMxwJlQnC2YWYxVgp66PZE+BBNF2jYQUBKCo1FDeZg==",
+ "version": "1.0.30001690",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001690.tgz",
+ "integrity": "sha512-5ExiE3qQN6oF8Clf8ifIDcMRCRE/dMGcETG/XGMD8/XiXm6HXQgQTh1yZYLXXpSOsEUlJm1Xr7kGULZTuGtP/w==",
"funding": [
{
"type": "opencollective",
@@ -43,7 +44,8 @@
"type": "github",
"url": "https://github.com/sponsors/ai"
}
- ]
+ ],
+ "license": "CC-BY-4.0"
}
}
}
diff --git a/compat-table/package.json b/compat-table/package.json
index 8287a7cf932..701a01d1d90 100644
--- a/compat-table/package.json
+++ b/compat-table/package.json
@@ -1,12 +1,12 @@
{
"githubDependencies": {
- "kangax/compat-table": "0f26d4f4e102f7dffb749111ac8cfa46103c10bc",
- "williamkapke/node-compat-table": "25e6df7a41013f1ea62c3b4116f9bb53c399bded"
+ "kangax/compat-table": "b3427ba3a2d6fd2dc427c3dfcf191832f634eb06",
+ "williamkapke/node-compat-table": "97a03aa807cfa8d83ad806fbcfe317ad95a7a159"
},
"dependencies": {
- "@mdn/browser-compat-data": "5.5.34",
+ "@mdn/browser-compat-data": "5.6.25",
"@types/caniuse-lite": "1.0.1",
"@types/node": "20.3.2",
- "caniuse-lite": "1.0.30001636"
+ "caniuse-lite": "1.0.30001690"
}
}
diff --git a/compat-table/src/mdn.ts b/compat-table/src/mdn.ts
index 93b9226d959..4051891dd36 100644
--- a/compat-table/src/mdn.ts
+++ b/compat-table/src/mdn.ts
@@ -34,31 +34,31 @@ const cssFeatures: Partial> = {
'css.types.color.oklch',
],
GradientDoublePosition: [
- 'css.types.image.gradient.conic-gradient.doubleposition',
- 'css.types.image.gradient.linear-gradient.doubleposition',
- 'css.types.image.gradient.radial-gradient.doubleposition',
- 'css.types.image.gradient.repeating-linear-gradient.doubleposition',
- 'css.types.image.gradient.repeating-radial-gradient.doubleposition',
+ 'css.types.gradient.conic-gradient.doubleposition',
+ 'css.types.gradient.linear-gradient.doubleposition',
+ 'css.types.gradient.radial-gradient.doubleposition',
+ 'css.types.gradient.repeating-linear-gradient.doubleposition',
+ 'css.types.gradient.repeating-radial-gradient.doubleposition',
],
GradientInterpolation: [
- 'css.types.image.gradient.conic-gradient.hue_interpolation_method',
- 'css.types.image.gradient.conic-gradient.interpolation_color_space',
- 'css.types.image.gradient.linear-gradient.hue_interpolation_method',
- 'css.types.image.gradient.linear-gradient.interpolation_color_space',
- 'css.types.image.gradient.radial-gradient.hue_interpolation_method',
- 'css.types.image.gradient.radial-gradient.interpolation_color_space',
- 'css.types.image.gradient.repeating-conic-gradient.hue_interpolation_method',
- 'css.types.image.gradient.repeating-conic-gradient.interpolation_color_space',
- 'css.types.image.gradient.repeating-linear-gradient.hue_interpolation_method',
- 'css.types.image.gradient.repeating-linear-gradient.interpolation_color_space',
- 'css.types.image.gradient.repeating-radial-gradient.hue_interpolation_method',
- 'css.types.image.gradient.repeating-radial-gradient.interpolation_color_space',
+ 'css.types.gradient.conic-gradient.hue_interpolation_method',
+ 'css.types.gradient.conic-gradient.interpolation_color_space',
+ 'css.types.gradient.linear-gradient.hue_interpolation_method',
+ 'css.types.gradient.linear-gradient.interpolation_color_space',
+ 'css.types.gradient.radial-gradient.hue_interpolation_method',
+ 'css.types.gradient.radial-gradient.interpolation_color_space',
+ 'css.types.gradient.repeating-conic-gradient.hue_interpolation_method',
+ 'css.types.gradient.repeating-conic-gradient.interpolation_color_space',
+ 'css.types.gradient.repeating-linear-gradient.hue_interpolation_method',
+ 'css.types.gradient.repeating-linear-gradient.interpolation_color_space',
+ 'css.types.gradient.repeating-radial-gradient.hue_interpolation_method',
+ 'css.types.gradient.repeating-radial-gradient.interpolation_color_space',
],
GradientMidpoints: [
- 'css.types.image.gradient.linear-gradient.interpolation_hints',
- 'css.types.image.gradient.radial-gradient.interpolation_hints',
- 'css.types.image.gradient.repeating-linear-gradient.interpolation_hints',
- 'css.types.image.gradient.repeating-radial-gradient.interpolation_hints',
+ 'css.types.gradient.linear-gradient.interpolation_hints',
+ 'css.types.gradient.radial-gradient.interpolation_hints',
+ 'css.types.gradient.repeating-linear-gradient.interpolation_hints',
+ 'css.types.gradient.repeating-radial-gradient.interpolation_hints',
],
HexRGBA: 'css.types.color.rgb_hexadecimal_notation.alpha_hexadecimal_notation',
HWB: 'css.types.color.hwb',
diff --git a/internal/bundler/bundler.go b/internal/bundler/bundler.go
index 67d9e41f54a..b4883a2261b 100644
--- a/internal/bundler/bundler.go
+++ b/internal/bundler/bundler.go
@@ -13,6 +13,7 @@ import (
"fmt"
"math/rand"
"net/http"
+ "net/url"
"sort"
"strings"
"sync"
@@ -120,11 +121,29 @@ type tlaCheck struct {
}
func parseFile(args parseArgs) {
+ pathForIdentifierName := args.keyPath.Text
+
+ // Identifier name generation may use the name of the parent folder if the
+ // file name starts with "index". However, this is problematic when the
+ // parent folder includes the parent directory of what the developer
+ // considers to be the root of the source tree. If that happens, strip the
+ // parent folder to avoid including it in the generated name.
+ if relative, ok := args.fs.Rel(args.options.AbsOutputBase, pathForIdentifierName); ok {
+ for {
+ next := strings.TrimPrefix(strings.TrimPrefix(relative, "../"), "..\\")
+ if relative == next {
+ break
+ }
+ relative = next
+ }
+ pathForIdentifierName = relative
+ }
+
source := logger.Source{
Index: args.sourceIndex,
KeyPath: args.keyPath,
PrettyPath: args.prettyPath,
- IdentifierName: js_ast.GenerateNonUniqueNameFromPath(args.keyPath.Text),
+ IdentifierName: js_ast.GenerateNonUniqueNameFromPath(pathForIdentifierName),
}
var loader config.Loader
@@ -171,7 +190,7 @@ func parseFile(args parseArgs) {
// The special "default" loader determines the loader from the file path
if loader == config.LoaderDefault {
- loader = loaderFromFileExtension(args.options.ExtensionToLoader, base+ext)
+ loader = config.LoaderFromFileExtension(args.options.ExtensionToLoader, base+ext)
}
// Reject unsupported import attributes when the loader isn't "copy" (since
@@ -304,6 +323,7 @@ func parseFile(args parseArgs) {
result.ok = ok
case config.LoaderText:
+ source.Contents = strings.TrimPrefix(source.Contents, "\xEF\xBB\xBF") // Strip any UTF-8 BOM from the text
encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents))
expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(source.Contents)}}
ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
@@ -625,16 +645,10 @@ func parseFile(args parseArgs) {
// Attempt to fill in null entries using the file system
for i, source := range sourceMap.Sources {
if sourceMap.SourcesContent[i].Value == nil {
- var absPath string
- if args.fs.IsAbs(source) {
- absPath = source
- } else if path.Namespace == "file" {
- absPath = args.fs.Join(args.fs.Dir(path.Text), source)
- } else {
- continue
- }
- if contents, err, _ := args.caches.FSCache.ReadFile(args.fs, absPath); err == nil {
- sourceMap.SourcesContent[i].Value = helpers.StringToUTF16(contents)
+ if sourceURL, err := url.Parse(source); err == nil && helpers.IsFileURL(sourceURL) {
+ if contents, err, _ := args.caches.FSCache.ReadFile(args.fs, helpers.FilePathFromFileURL(args.fs, sourceURL)); err == nil {
+ sourceMap.SourcesContent[i].Value = helpers.StringToUTF16(contents)
+ }
}
}
}
@@ -796,38 +810,68 @@ func extractSourceMapFromComment(
) (logger.Path, *string) {
// Support data URLs
if parsed, ok := resolver.ParseDataURL(comment.Text); ok {
- if contents, err := parsed.DecodeData(); err == nil {
- return logger.Path{Text: source.PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &contents
- } else {
+ contents, err := parsed.DecodeData()
+ if err != nil {
log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Warning, tracker, comment.Range,
fmt.Sprintf("Unsupported source map comment: %s", err.Error()))
return logger.Path{}, nil
}
+ return logger.Path{Text: source.PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &contents
}
- // Relative path in a file with an absolute path
- if absResolveDir != "" {
- absPath := fs.Join(absResolveDir, comment.Text)
- path := logger.Path{Text: absPath, Namespace: "file"}
- contents, err, originalError := fsCache.ReadFile(fs, absPath)
- if log.Level <= logger.LevelDebug && originalError != nil {
- log.AddID(logger.MsgID_None, logger.Debug, tracker, comment.Range, fmt.Sprintf("Failed to read file %q: %s", resolver.PrettyPath(fs, path), originalError.Error()))
- }
- if err != nil {
- kind := logger.Warning
- if err == syscall.ENOENT {
- // Don't report a warning because this is likely unactionable
- kind = logger.Debug
- }
- log.AddID(logger.MsgID_SourceMap_MissingSourceMap, kind, tracker, comment.Range,
- fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, path), err.Error()))
- return logger.Path{}, nil
- }
+ // Support file URLs of two forms:
+ //
+ // Relative: "./foo.js.map"
+ // Absolute: "file:///Users/User/Desktop/foo.js.map"
+ //
+ var absPath string
+ if commentURL, err := url.Parse(comment.Text); err != nil {
+ // Show a warning if the comment can't be parsed as a URL
+ log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Warning, tracker, comment.Range,
+ fmt.Sprintf("Unsupported source map comment: %s", err.Error()))
+ return logger.Path{}, nil
+ } else if commentURL.Scheme != "" && commentURL.Scheme != "file" {
+ // URLs with schemes other than "file" are unsupported (e.g. "https"),
+ // but don't warn the user about this because it's not a bug they can fix
+ log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Debug, tracker, comment.Range,
+ fmt.Sprintf("Unsupported source map comment: Unsupported URL scheme %q", commentURL.Scheme))
+ return logger.Path{}, nil
+ } else if commentURL.Host != "" && commentURL.Host != "localhost" {
+ // File URLs with hosts are unsupported (e.g. "file://foo.js.map")
+ log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Warning, tracker, comment.Range,
+ fmt.Sprintf("Unsupported source map comment: Unsupported host %q in file URL", commentURL.Host))
+ return logger.Path{}, nil
+ } else if helpers.IsFileURL(commentURL) {
+ // Handle absolute file URLs
+ absPath = helpers.FilePathFromFileURL(fs, commentURL)
+ } else if absResolveDir == "" {
+ // Fail if plugins don't set a resolve directory
+ log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Debug, tracker, comment.Range,
+ "Unsupported source map comment: Cannot resolve relative URL without a resolve directory")
+ return logger.Path{}, nil
+ } else {
+ // Join the (potentially relative) URL path from the comment text
+ // to the resolve directory path to form the final absolute path
+ absResolveURL := helpers.FileURLFromFilePath(absResolveDir)
+ if !strings.HasSuffix(absResolveURL.Path, "/") {
+ absResolveURL.Path += "/"
+ }
+ absPath = helpers.FilePathFromFileURL(fs, absResolveURL.ResolveReference(commentURL))
+ }
+
+ // Try to read the file contents
+ path := logger.Path{Text: absPath, Namespace: "file"}
+ if contents, err, _ := fsCache.ReadFile(fs, absPath); err == syscall.ENOENT {
+ log.AddID(logger.MsgID_SourceMap_MissingSourceMap, logger.Debug, tracker, comment.Range,
+ fmt.Sprintf("Cannot read file: %s", absPath))
+ return logger.Path{}, nil
+ } else if err != nil {
+ log.AddID(logger.MsgID_SourceMap_MissingSourceMap, logger.Warning, tracker, comment.Range,
+ fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, path), err.Error()))
+ return logger.Path{}, nil
+ } else {
return path, &contents
}
-
- // Anything else is unsupported
- return logger.Path{}, nil
}
func sanitizeLocation(fs fs.FS, loc *logger.MsgLocation) {
@@ -1108,19 +1152,16 @@ func runOnLoadPlugins(
// Read normal modules from disk
if source.KeyPath.Namespace == "file" {
- if contents, err, originalError := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil {
+ if contents, err, _ := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil {
source.Contents = contents
return loaderPluginResult{
loader: config.LoaderDefault,
absResolveDir: fs.Dir(source.KeyPath.Text),
}, true
} else {
- if log.Level <= logger.LevelDebug && originalError != nil {
- log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read file %q: %s", source.KeyPath.Text, originalError.Error()))
- }
if err == syscall.ENOENT {
log.AddError(&tracker, importPathRange,
- fmt.Sprintf("Could not read from file: %s", source.KeyPath.Text))
+ fmt.Sprintf("Cannot read file: %s", source.KeyPath.Text))
return loaderPluginResult{}, false
} else {
log.AddError(&tracker, importPathRange,
@@ -1158,30 +1199,6 @@ func runOnLoadPlugins(
return loaderPluginResult{loader: config.LoaderNone}, true
}
-func loaderFromFileExtension(extensionToLoader map[string]config.Loader, base string) config.Loader {
- // Pick the loader with the longest matching extension. So if there's an
- // extension for ".css" and for ".module.css", we want to match the one for
- // ".module.css" before the one for ".css".
- if i := strings.IndexByte(base, '.'); i != -1 {
- for {
- if loader, ok := extensionToLoader[base[i:]]; ok {
- return loader
- }
- base = base[i+1:]
- i = strings.IndexByte(base, '.')
- if i == -1 {
- break
- }
- }
- } else {
- // If there's no extension, explicitly check for an extensionless loader
- if loader, ok := extensionToLoader[""]; ok {
- return loader
- }
- }
- return config.LoaderNone
-}
-
// Identify the path by its lowercase absolute path name with Windows-specific
// slashes substituted for standard slashes. This should hopefully avoid path
// issues on Windows where multiple different paths can refer to the same
@@ -1857,15 +1874,20 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
return nil
}
- // Parse all entry points that were resolved successfully
+ // Determine output paths for all entry points that were resolved successfully
+ type entryPointToParse struct {
+ index int
+ parse func() uint32
+ }
+ var entryPointsToParse []entryPointToParse
for i, info := range entryPointInfos {
if info.results == nil {
continue
}
for _, resolveResult := range info.results {
+ resolveResult := resolveResult
prettyPath := resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary)
- sourceIndex := s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil)
outputPath := entryPoints[i].OutputPath
outputPathWasAutoGenerated := false
@@ -1900,9 +1922,17 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
outputPathWasAutoGenerated = true
}
+ // Defer parsing for this entry point until later
+ entryPointsToParse = append(entryPointsToParse, entryPointToParse{
+ index: len(entryMetas),
+ parse: func() uint32 {
+ return s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil)
+ },
+ })
+
entryMetas = append(entryMetas, graph.EntryPoint{
OutputPath: outputPath,
- SourceIndex: sourceIndex,
+ SourceIndex: ast.InvalidRef.SourceIndex,
OutputPathWasAutoGenerated: outputPathWasAutoGenerated,
})
}
@@ -1924,6 +1954,11 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
}
}
+ // Only parse entry points after "AbsOutputBase" has been determined
+ for _, toParse := range entryPointsToParse {
+ entryMetas[toParse.index].SourceIndex = toParse.parse()
+ }
+
// Turn all output paths back into relative paths, but this time relative to
// the "outbase" value we computed above
for i := range entryMetas {
@@ -2541,11 +2576,13 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
// the entry point itself.
customFilePath := ""
useOutputFile := false
+ isEntryPoint := false
if result.file.inputFile.Loader == config.LoaderCopy {
if metaIndex, ok := entryPointSourceIndexToMetaIndex[uint32(sourceIndex)]; ok {
template = s.options.EntryPathTemplate
customFilePath = entryPointMeta[metaIndex].OutputPath
useOutputFile = s.options.AbsOutputFile != ""
+ isEntryPoint = true
}
}
@@ -2596,8 +2633,14 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly),
len(bytes),
)
+ entryPointJSON := ""
+ if isEntryPoint {
+ entryPointJSON = fmt.Sprintf("\"entryPoint\": %s,\n ",
+ helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly))
+ }
jsonMetadataChunk = fmt.Sprintf(
- "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d\n }",
+ "{\n \"imports\": [],\n \"exports\": [],\n %s\"inputs\": %s,\n \"bytes\": %d\n }",
+ entryPointJSON,
inputs,
len(bytes),
)
diff --git a/internal/bundler_tests/bundler_dce_test.go b/internal/bundler_tests/bundler_dce_test.go
index 27c71630e70..cf8955fcf0f 100644
--- a/internal/bundler_tests/bundler_dce_test.go
+++ b/internal/bundler_tests/bundler_dce_test.go
@@ -1272,9 +1272,9 @@ func TestTreeShakingReactElements(t *testing.T) {
}
func TestDisableTreeShaking(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "pure": {Flags: config.CallCanBeUnwrappedIfUnused},
- "some.fn": {Flags: config.CallCanBeUnwrappedIfUnused},
+ defines := config.ProcessDefines([]config.DefineData{
+ {KeyParts: []string{"pure"}, Flags: config.CallCanBeUnwrappedIfUnused},
+ {KeyParts: []string{"some", "fn"}, Flags: config.CallCanBeUnwrappedIfUnused},
})
dce_suite.expectBundled(t, bundled{
files: map[string]string{
@@ -1405,6 +1405,118 @@ func TestDeadCodeFollowingJump(t *testing.T) {
})
}
+func TestDeadCodeInsideEmptyTry(t *testing.T) {
+ dce_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ try { foo() }
+ catch { require('./a') }
+ finally { require('./b') }
+
+ try {}
+ catch { require('./c') }
+ finally { require('./d') }
+ `,
+ "/a.js": ``,
+ "/b.js": ``,
+ "/c.js": `TEST FAILED`, // Dead code paths should not import code
+ "/d.js": ``,
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
+ },
+ })
+}
+
+func TestDeadCodeInsideUnusedCases(t *testing.T) {
+ dce_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ // Unknown test value
+ switch (x) {
+ case 0: _ = require('./a'); break
+ case 1: _ = require('./b'); break
+ }
+
+ // Known test value
+ switch (1) {
+ case 0: _ = require('./FAIL-known-0'); break
+ case 1: _ = require('./a'); break
+ case 1: _ = require('./FAIL-known-1'); break
+ case 2: _ = require('./FAIL-known-2'); break
+ }
+
+ // Check for "default"
+ switch (0) {
+ case 1: _ = require('./FAIL-default-1'); break
+ default: _ = require('./a'); break
+ }
+ switch (1) {
+ case 1: _ = require('./a'); break
+ default: _ = require('./FAIL-default'); break
+ }
+ switch (0) {
+ case 1: _ = require('./FAIL-default-1'); break
+ default: _ = require('./FAIL-default'); break
+ case 0: _ = require('./a'); break
+ }
+
+ // Check for non-constant cases
+ switch (1) {
+ case x: _ = require('./a'); break
+ case 1: _ = require('./b'); break
+ case x: _ = require('./FAIL-x'); break
+ default: _ = require('./FAIL-x-default'); break
+ }
+
+ // Check for other kinds of jumps
+ for (const x of y)
+ switch (1) {
+ case 0: _ = require('./FAIL-continue-0'); continue
+ case 1: _ = require('./a'); continue
+ case 2: _ = require('./FAIL-continue-2'); continue
+ }
+ x = () => {
+ switch (1) {
+ case 0: _ = require('./FAIL-return-0'); return
+ case 1: _ = require('./a'); return
+ case 2: _ = require('./FAIL-return-2'); return
+ }
+ }
+
+ // Check for fall-through
+ switch ('b') {
+ case 'a': _ = require('./FAIL-fallthrough-a')
+ case 'b': _ = require('./a')
+ case 'c': _ = require('./b'); break
+ case 'd': _ = require('./FAIL-fallthrough-d')
+ }
+ switch ('b') {
+ case 'a': _ = require('./FAIL-fallthrough-a')
+ case 'b':
+ case 'c': _ = require('./a')
+ case 'd': _ = require('./b'); break
+ case 'e': _ = require('./FAIL-fallthrough-e')
+ }
+ `,
+ "/a.js": ``,
+ "/b.js": ``,
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
+ },
+ expectedScanLog: `entry.js: WARNING: This case clause will never be evaluated because it duplicates an earlier case clause
+entry.js: NOTE: The earlier case clause is here:
+entry.js: WARNING: This case clause will never be evaluated because it duplicates an earlier case clause
+entry.js: NOTE: The earlier case clause is here:
+`,
+ })
+}
+
func TestRemoveTrailingReturn(t *testing.T) {
dce_suite.expectBundled(t, bundled{
files: map[string]string{
@@ -3121,6 +3233,13 @@ func TestConstValueInliningDirectEval(t *testing.T) {
console.log(x, eval('x'))
}
`,
+ "/issue-4055.ts": `
+ const variable = false
+ ;(function () {
+ eval("var variable = true")
+ console.log(variable)
+ })()
+ `,
},
entryPaths: []string{
"/top-level-no-eval.js",
@@ -3129,6 +3248,7 @@ func TestConstValueInliningDirectEval(t *testing.T) {
"/nested-eval.js",
"/ts-namespace-no-eval.ts",
"/ts-namespace-eval.ts",
+ "/issue-4055.ts",
},
options: config.Options{
Mode: config.ModePassThrough,
diff --git a/internal/bundler_tests/bundler_default_test.go b/internal/bundler_tests/bundler_default_test.go
index dd8d6b8fbad..0af9b8a257d 100644
--- a/internal/bundler_tests/bundler_default_test.go
+++ b/internal/bundler_tests/bundler_default_test.go
@@ -3289,6 +3289,7 @@ func TestImportMetaNoBundle(t *testing.T) {
func TestLegalCommentsNone(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
+ "/empty.js": ``,
"/entry.js": `
import './a'
import './b'
@@ -3298,6 +3299,7 @@ func TestLegalCommentsNone(t *testing.T) {
"/b.js": `console.log('in b') //! Copyright notice 1`,
"/c.js": `console.log('in c') //! Copyright notice 2`,
+ "/empty.css": ``,
"/entry.css": `
@import "./a.css";
@import "./b.css";
@@ -3307,7 +3309,12 @@ func TestLegalCommentsNone(t *testing.T) {
"/b.css": `b { zoom: 2 } /*! Copyright notice 1 */`,
"/c.css": `c { zoom: 2 } /*! Copyright notice 2 */`,
},
- entryPaths: []string{"/entry.js", "/entry.css"},
+ entryPaths: []string{
+ "/entry.js",
+ "/entry.css",
+ "/empty.js",
+ "/empty.css",
+ },
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
@@ -3319,6 +3326,7 @@ func TestLegalCommentsNone(t *testing.T) {
func TestLegalCommentsInline(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
+ "/empty.js": ``,
"/entry.js": `
import './a'
import './b'
@@ -3328,6 +3336,7 @@ func TestLegalCommentsInline(t *testing.T) {
"/b.js": `console.log('in b') //! Copyright notice 1`,
"/c.js": `console.log('in c') //! Copyright notice 2`,
+ "/empty.css": ``,
"/entry.css": `
@import "./a.css";
@import "./b.css";
@@ -3337,7 +3346,12 @@ func TestLegalCommentsInline(t *testing.T) {
"/b.css": `b { zoom: 2 } /*! Copyright notice 1 */`,
"/c.css": `c { zoom: 2 } /*! Copyright notice 2 */`,
},
- entryPaths: []string{"/entry.js", "/entry.css"},
+ entryPaths: []string{
+ "/entry.js",
+ "/entry.css",
+ "/empty.js",
+ "/empty.css",
+ },
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
@@ -3349,6 +3363,7 @@ func TestLegalCommentsInline(t *testing.T) {
func TestLegalCommentsEndOfFile(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
+ "/empty.js": ``,
"/entry.js": `
import './a'
import './b'
@@ -3358,6 +3373,7 @@ func TestLegalCommentsEndOfFile(t *testing.T) {
"/b.js": `console.log('in b') //! Copyright notice 1`,
"/c.js": `console.log('in c') //! Copyright notice 2`,
+ "/empty.css": ``,
"/entry.css": `
@import "./a.css";
@import "./b.css";
@@ -3367,7 +3383,12 @@ func TestLegalCommentsEndOfFile(t *testing.T) {
"/b.css": `b { zoom: 2 } /*! Copyright notice 1 */`,
"/c.css": `c { zoom: 2 } /*! Copyright notice 2 */`,
},
- entryPaths: []string{"/entry.js", "/entry.css"},
+ entryPaths: []string{
+ "/entry.js",
+ "/entry.css",
+ "/empty.js",
+ "/empty.css",
+ },
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
@@ -3379,6 +3400,7 @@ func TestLegalCommentsEndOfFile(t *testing.T) {
func TestLegalCommentsLinked(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
+ "/empty.js": ``,
"/entry.js": `
import './a'
import './b'
@@ -3388,6 +3410,7 @@ func TestLegalCommentsLinked(t *testing.T) {
"/b.js": `console.log('in b') //! Copyright notice 1`,
"/c.js": `console.log('in c') //! Copyright notice 2`,
+ "/empty.css": ``,
"/entry.css": `
@import "./a.css";
@import "./b.css";
@@ -3397,7 +3420,12 @@ func TestLegalCommentsLinked(t *testing.T) {
"/b.css": `b { zoom: 2 } /*! Copyright notice 1 */`,
"/c.css": `c { zoom: 2 } /*! Copyright notice 2 */`,
},
- entryPaths: []string{"/entry.js", "/entry.css"},
+ entryPaths: []string{
+ "/entry.js",
+ "/entry.css",
+ "/empty.js",
+ "/empty.css",
+ },
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
@@ -3409,6 +3437,7 @@ func TestLegalCommentsLinked(t *testing.T) {
func TestLegalCommentsExternal(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
+ "/empty.js": ``,
"/entry.js": `
import './a'
import './b'
@@ -3418,6 +3447,7 @@ func TestLegalCommentsExternal(t *testing.T) {
"/b.js": `console.log('in b') //! Copyright notice 1`,
"/c.js": `console.log('in c') //! Copyright notice 2`,
+ "/empty.css": ``,
"/entry.css": `
@import "./a.css";
@import "./b.css";
@@ -3427,7 +3457,12 @@ func TestLegalCommentsExternal(t *testing.T) {
"/b.css": `b { zoom: 2 } /*! Copyright notice 1 */`,
"/c.css": `c { zoom: 2 } /*! Copyright notice 2 */`,
},
- entryPaths: []string{"/entry.js", "/entry.css"},
+ entryPaths: []string{
+ "/entry.js",
+ "/entry.css",
+ "/empty.js",
+ "/empty.css",
+ },
options: config.Options{
Mode: config.ModeBundle,
AbsOutputDir: "/out",
@@ -4618,23 +4653,27 @@ func TestInjectDuplicate(t *testing.T) {
}
func TestInject(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "chain.prop": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"chain", "prop"},
DefineExpr: &config.DefineExpr{
Parts: []string{"replace"},
},
},
- "obj.defined": {
+ {
+ KeyParts: []string{"obj", "defined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("defined")},
},
},
- "injectedAndDefined": {
+ {
+ KeyParts: []string{"injectedAndDefined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("should be used")},
},
},
- "injected.and.defined": {
+ {
+ KeyParts: []string{"injected", "and", "defined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("should be used")},
},
@@ -4714,23 +4753,27 @@ func TestInject(t *testing.T) {
}
func TestInjectNoBundle(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "chain.prop": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"chain", "prop"},
DefineExpr: &config.DefineExpr{
Parts: []string{"replace"},
},
},
- "obj.defined": {
+ {
+ KeyParts: []string{"obj", "defined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("defined")},
},
},
- "injectedAndDefined": {
+ {
+ KeyParts: []string{"injectedAndDefined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("should be used")},
},
},
- "injected.and.defined": {
+ {
+ KeyParts: []string{"injected", "and", "defined"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.EString{Value: helpers.StringToUTF16("should be used")},
},
@@ -4804,13 +4847,15 @@ func TestInjectNoBundle(t *testing.T) {
}
func TestInjectJSX(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "React.createElement": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"React", "createElement"},
DefineExpr: &config.DefineExpr{
Parts: []string{"el"},
},
},
- "React.Fragment": {
+ {
+ KeyParts: []string{"React", "Fragment"},
DefineExpr: &config.DefineExpr{
Parts: []string{"frag"},
},
@@ -4927,9 +4972,10 @@ func TestInjectImportOrder(t *testing.T) {
}
func TestInjectAssign(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "defined": {DefineExpr: &config.DefineExpr{Parts: []string{"some", "define"}}},
- })
+ defines := config.ProcessDefines([]config.DefineData{{
+ KeyParts: []string{"defined"},
+ DefineExpr: &config.DefineExpr{Parts: []string{"some", "define"}},
+ }})
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
@@ -5004,9 +5050,9 @@ func TestInjectWithDefine(t *testing.T) {
"both": {DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: helpers.StringToUTF16("define")}}},
"first": {DefineExpr: &config.DefineExpr{Parts: []string{"second"}}},
},
- DotDefines: map[string][]config.DotDefine{
- "th": {{Parts: []string{"bo", "th"}, Data: config.DefineData{DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: helpers.StringToUTF16("defi.ne")}}}}},
- "st": {{Parts: []string{"fir", "st"}, Data: config.DefineData{DefineExpr: &config.DefineExpr{Parts: []string{"seco", "nd"}}}}},
+ DotDefines: map[string][]config.DefineData{
+ "th": {{KeyParts: []string{"bo", "th"}, DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: helpers.StringToUTF16("defi.ne")}}}},
+ "st": {{KeyParts: []string{"fir", "st"}, DefineExpr: &config.DefineExpr{Parts: []string{"seco", "nd"}}}},
},
},
},
@@ -5075,18 +5121,21 @@ func TestAvoidTDZNoBundle(t *testing.T) {
}
func TestDefineImportMeta(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "import.meta": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"import", "meta"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 1},
},
},
- "import.meta.foo": {
+ {
+ KeyParts: []string{"import", "meta", "foo"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 2},
},
},
- "import.meta.foo.bar": {
+ {
+ KeyParts: []string{"import", "meta", "foo", "bar"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 3},
},
@@ -5119,8 +5168,9 @@ func TestDefineImportMeta(t *testing.T) {
}
func TestDefineImportMetaES5(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "import.meta.x": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"import", "meta", "x"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 1},
},
@@ -5193,18 +5243,21 @@ func TestInjectImportMeta(t *testing.T) {
}
func TestDefineThis(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "this": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"this"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 1},
},
},
- "this.foo": {
+ {
+ KeyParts: []string{"this", "foo"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 2},
},
},
- "this.foo.bar": {
+ {
+ KeyParts: []string{"this", "foo", "bar"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 3},
},
@@ -5259,13 +5312,12 @@ func TestDefineThis(t *testing.T) {
}
func TestDefineOptionalChain(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "a.b.c": {
- DefineExpr: &config.DefineExpr{
- Constant: &js_ast.ENumber{Value: 1},
- },
+ defines := config.ProcessDefines([]config.DefineData{{
+ KeyParts: []string{"a", "b", "c"},
+ DefineExpr: &config.DefineExpr{
+ Constant: &js_ast.ENumber{Value: 1},
},
- })
+ }})
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
@@ -5294,13 +5346,12 @@ func TestDefineOptionalChain(t *testing.T) {
}
func TestDefineOptionalChainLowered(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "a.b.c": {
- DefineExpr: &config.DefineExpr{
- Constant: &js_ast.ENumber{Value: 1},
- },
+ defines := config.ProcessDefines([]config.DefineData{{
+ KeyParts: []string{"a", "b", "c"},
+ DefineExpr: &config.DefineExpr{
+ Constant: &js_ast.ENumber{Value: 1},
},
- })
+ }})
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
@@ -5331,13 +5382,15 @@ func TestDefineOptionalChainLowered(t *testing.T) {
// See: https://github.com/evanw/esbuild/issues/3551
func TestDefineOptionalChainPanicIssue3551(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "x": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"x"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 1},
},
},
- "a.b": {
+ {
+ KeyParts: []string{"a", "b"},
DefineExpr: &config.DefineExpr{
Constant: &js_ast.ENumber{Value: 1},
},
@@ -5389,23 +5442,27 @@ func TestDefineOptionalChainPanicIssue3551(t *testing.T) {
// See: https://github.com/evanw/esbuild/issues/2407
func TestDefineInfiniteLoopIssue2407(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "a.b": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"a", "b"},
DefineExpr: &config.DefineExpr{
Parts: []string{"b", "c"},
},
},
- "b.c": {
+ {
+ KeyParts: []string{"b", "c"},
DefineExpr: &config.DefineExpr{
Parts: []string{"c", "a"},
},
},
- "c.a": {
+ {
+ KeyParts: []string{"c", "a"},
DefineExpr: &config.DefineExpr{
Parts: []string{"a", "b"},
},
},
- "x.y": {
+ {
+ KeyParts: []string{"x", "y"},
DefineExpr: &config.DefineExpr{
Parts: []string{"y"},
},
@@ -5428,33 +5485,39 @@ func TestDefineInfiniteLoopIssue2407(t *testing.T) {
}
func TestDefineAssignWarning(t *testing.T) {
- defines := config.ProcessDefines(map[string]config.DefineData{
- "a": {
+ defines := config.ProcessDefines([]config.DefineData{
+ {
+ KeyParts: []string{"a"},
DefineExpr: &config.DefineExpr{
Constant: js_ast.ENullShared,
},
},
- "b.c": {
+ {
+ KeyParts: []string{"b", "c"},
DefineExpr: &config.DefineExpr{
Constant: js_ast.ENullShared,
},
},
- "d": {
+ {
+ KeyParts: []string{"d"},
DefineExpr: &config.DefineExpr{
Parts: []string{"ident"},
},
},
- "e.f": {
+ {
+ KeyParts: []string{"e", "f"},
DefineExpr: &config.DefineExpr{
Parts: []string{"ident"},
},
},
- "g": {
+ {
+ KeyParts: []string{"g"},
DefineExpr: &config.DefineExpr{
Parts: []string{"dot", "chain"},
},
},
- "h.i": {
+ {
+ KeyParts: []string{"h", "i"},
DefineExpr: &config.DefineExpr{
Parts: []string{"dot", "chain"},
},
@@ -9095,3 +9158,91 @@ func TestStringExportNamesIIFE(t *testing.T) {
},
})
}
+
+func TestSourceIdentifierNameIndexSingleEntry(t *testing.T) {
+ default_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ // Generate identifier names for top-level and nested files
+ "/Users/user/project/index.js": `
+ require('.')
+ require('pkg')
+ require('./nested')
+ `,
+ "/Users/user/project/nested/index.js": `exports.nested = true`,
+ "/Users/user/project/node_modules/pkg/index.js": `exports.pkg = true`,
+ },
+ entryPaths: []string{"/Users/user/project/index.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputDir: "/Users/user/project/out",
+ },
+ })
+}
+
+func TestSourceIdentifierNameIndexMultipleEntry(t *testing.T) {
+ default_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ // Generate identifier names for top-level and nested files
+ "/Users/user/project/home/index.js": `
+ require('.')
+ require('pkg')
+ require('../common')
+ `,
+ "/Users/user/project/about/index.js": `
+ require('.')
+ require('pkg')
+ require('../common')
+ `,
+ "/Users/user/project/common/index.js": `exports.common = true`,
+ "/Users/user/project/node_modules/pkg/index.js": `exports.pkg = true`,
+ },
+ entryPaths: []string{
+ "/Users/user/project/home/index.js",
+ "/Users/user/project/about/index.js",
+ },
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputDir: "/Users/user/project/out",
+ },
+ })
+}
+
+func TestResolveExtensionsOrderIssue4053(t *testing.T) {
+ css_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ import test from "./Test"
+ import image from "expo-image"
+ console.log(test === 'Test.web.tsx')
+ console.log(image === 'Image.web.tsx')
+ `,
+ "/Test.web.tsx": `export default 'Test.web.tsx'`,
+ "/Test.tsx": `export default 'Test.tsx'`,
+ "/node_modules/expo-image/index.js": `
+ export { default } from "./Image"
+ `,
+ "/node_modules/expo-image/Image.web.tsx": `export default 'Image.web.tsx'`,
+ "/node_modules/expo-image/Image.tsx": `export default 'Image.tsx'`,
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
+ ExtensionOrder: []string{
+ ".web.mjs",
+ ".mjs",
+ ".web.js",
+ ".js",
+ ".web.mts",
+ ".mts",
+ ".web.ts",
+ ".ts",
+ ".web.jsx",
+ ".jsx",
+ ".web.tsx",
+ ".tsx",
+ ".json",
+ },
+ },
+ })
+}
diff --git a/internal/bundler_tests/bundler_loader_test.go b/internal/bundler_tests/bundler_loader_test.go
index 73e81dc4fb5..3ff10dd44c2 100644
--- a/internal/bundler_tests/bundler_loader_test.go
+++ b/internal/bundler_tests/bundler_loader_test.go
@@ -1194,6 +1194,7 @@ func TestLoaderCopyWithBundleEntryPoint(t *testing.T) {
".css": config.LoaderCSS,
".file": config.LoaderCopy,
},
+ NeedsMetafile: true,
},
})
}
@@ -1298,7 +1299,8 @@ func TestWithTypeJSONOverrideLoader(t *testing.T) {
},
entryPaths: []string{"/entry.js"},
options: config.Options{
- Mode: config.ModeBundle,
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
},
})
}
@@ -1313,7 +1315,8 @@ func TestWithTypeJSONOverrideLoaderGlob(t *testing.T) {
},
entryPaths: []string{"/entry.js"},
options: config.Options{
- Mode: config.ModeBundle,
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
},
})
}
@@ -1382,6 +1385,7 @@ func TestEmptyLoaderJS(t *testing.T) {
".js": config.LoaderJS,
".empty": config.LoaderEmpty,
},
+ AbsOutputFile: "/out.js",
},
expectedCompileLog: `entry.js: WARNING: Import "named" will always be undefined because the file "d.empty" has no exports
`,
@@ -1407,6 +1411,7 @@ func TestEmptyLoaderCSS(t *testing.T) {
".css": config.LoaderCSS,
".empty": config.LoaderEmpty,
},
+ AbsOutputFile: "/out.js",
},
})
}
@@ -1426,6 +1431,7 @@ func TestExtensionlessLoaderJS(t *testing.T) {
".js": config.LoaderJS,
"": config.LoaderJS,
},
+ AbsOutputFile: "/out.js",
},
})
}
@@ -1445,6 +1451,7 @@ func TestExtensionlessLoaderCSS(t *testing.T) {
".css": config.LoaderCSS,
"": config.LoaderCSS,
},
+ AbsOutputFile: "/out.js",
},
})
}
@@ -1739,3 +1746,43 @@ func TestLoaderJSONPrototypeES5(t *testing.T) {
},
})
}
+
+func TestLoaderJSONWithBigInt(t *testing.T) {
+ loader_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ import data from "./data.json"
+ console.log(data)
+ `,
+ "/data.json": `{
+ "invalid": [123n]
+ }`,
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
+ },
+ expectedScanLog: `data.json: ERROR: Unexpected "123n" in JSON
+`,
+ })
+}
+
+func TestLoaderTextUTF8BOM(t *testing.T) {
+ loader_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ import data1 from "./data1.txt"
+ import data2 from "./data2.txt"
+ console.log(data1, data2)
+ `,
+ "/data1.txt": "\xEF\xBB\xBFtext",
+ "/data2.txt": "text\xEF\xBB\xBF",
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/out.js",
+ },
+ })
+}
diff --git a/internal/bundler_tests/bundler_lower_test.go b/internal/bundler_tests/bundler_lower_test.go
index c43d9f6a959..d162b00f090 100644
--- a/internal/bundler_tests/bundler_lower_test.go
+++ b/internal/bundler_tests/bundler_lower_test.go
@@ -100,7 +100,7 @@ func TestLowerExponentiationOperatorNoBundle(t *testing.T) {
UnsupportedJSFeatures: es(2015),
AbsOutputFile: "/out.js",
},
- expectedScanLog: `entry.js: ERROR: Big integer literals are not available in the configured target environment
+ expectedScanLog: `entry.js: WARNING: Big integer literals are not available in the configured target environment and may crash at run-time
`,
})
}
diff --git a/internal/bundler_tests/bundler_packagejson_test.go b/internal/bundler_tests/bundler_packagejson_test.go
index 19aa3ee99ca..8f89eadf2a3 100644
--- a/internal/bundler_tests/bundler_packagejson_test.go
+++ b/internal/bundler_tests/bundler_packagejson_test.go
@@ -3002,3 +3002,36 @@ node_modules/foo/package.json: NOTE: The "default" condition comes earlier and w
`,
})
}
+
+// See: https://github.com/evanw/esbuild/issues/3887
+func TestPackageJsonExportsDefaultWarningIssue3887(t *testing.T) {
+ packagejson_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/entry.js": `
+ import "foo"
+ `,
+ "/node_modules/foo/dist/index.js": `
+ success()
+ `,
+ "/node_modules/foo/package.json": `
+ {
+ "exports": {
+ ".": {
+ "node": "./dist/index.js",
+ "require": "./dist/index.js",
+ "import": "./dist/index.esm.js",
+ "default": "./dist/index.esm.js"
+ }
+ }
+ }
+ `,
+ },
+ entryPaths: []string{"/entry.js"},
+ options: config.Options{
+ Mode: config.ModeBundle,
+ Platform: config.PlatformNode,
+ AbsOutputFile: "/out.js",
+ },
+ debugLogs: true,
+ })
+}
diff --git a/internal/bundler_tests/bundler_test.go b/internal/bundler_tests/bundler_test.go
index 85b3d6e03d8..95b11a800d7 100644
--- a/internal/bundler_tests/bundler_test.go
+++ b/internal/bundler_tests/bundler_test.go
@@ -125,6 +125,9 @@ func (s *suite) __expectBundledImpl(t *testing.T, args bundled, fsKind fs.MockKi
if args.absWorkingDir == "" {
args.absWorkingDir = "/"
}
+ if args.options.AbsOutputDir == "" {
+ args.options.AbsOutputDir = args.absWorkingDir // Match the behavior of the API in this case
+ }
// Handle conversion to Windows-style paths
if fsKind == fs.MockWindows {
diff --git a/internal/bundler_tests/bundler_tsconfig_test.go b/internal/bundler_tests/bundler_tsconfig_test.go
index 50ff57e6989..83c4d3d4de7 100644
--- a/internal/bundler_tests/bundler_tsconfig_test.go
+++ b/internal/bundler_tests/bundler_tsconfig_test.go
@@ -2749,3 +2749,181 @@ func TestTsconfigJsonConfigDirBaseURLInheritedPaths(t *testing.T) {
},
})
}
+
+// https://github.com/evanw/esbuild/issues/3915
+func TestTsconfigStackOverflowYarnPnP(t *testing.T) {
+ tsconfig_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/Users/user/project/entry.jsx": `
+ console.log()
+ `,
+ "/Users/user/project/tsconfig.json": `
+ {
+ "extends": "tsconfigs/config"
+ }
+ `,
+ "/Users/user/project/packages/tsconfigs/package.json": `
+ {
+ "exports": {
+ "./config": "./configs/tsconfig.json"
+ }
+ }
+ `,
+ "/Users/user/project/packages/tsconfigs/configs/tsconfig.json": `
+ {
+ "compilerOptions": {
+ "jsxFactory": "success"
+ }
+ }
+ `,
+ "/Users/user/project/.pnp.data.json": `
+ {
+ "packageRegistryData": [
+ [null, [
+ [null, {
+ "packageLocation": "./",
+ "packageDependencies": [
+ ["tsconfigs", "virtual:some-path"]
+ ],
+ "linkType": "SOFT"
+ }]
+ ]],
+ ["tsconfigs", [
+ ["virtual:some-path", {
+ "packageLocation": "./packages/tsconfigs/",
+ "packageDependencies": [
+ ["tsconfigs", "virtual:some-path"]
+ ],
+ "packagePeers": [],
+ "linkType": "SOFT"
+ }],
+ ["workspace:packages/tsconfigs", {
+ "packageLocation": "./packages/tsconfigs/",
+ "packageDependencies": [
+ ["tsconfigs", "workspace:packages/tsconfigs"]
+ ],
+ "linkType": "SOFT"
+ }]
+ ]]
+ ]
+ }
+ `,
+ },
+ entryPaths: []string{"/Users/user/project/entry.jsx"},
+ absWorkingDir: "/Users/user/project",
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputFile: "/Users/user/project/out.js",
+ },
+ })
+}
+
+func TestTsconfigJsonExtendsArrayIssue3898(t *testing.T) {
+ tsconfig_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/Users/user/project/index.tsx": `
+ import { type SomeType } from 'MUST_KEEP'
+ console.log(<>
+
+ >)
+ `,
+ "/Users/user/project/tsconfig.json": `
+ {
+ "extends": [
+ "./tsconfigs/a.json",
+ "./tsconfigs/b.json",
+ ]
+ }
+ `,
+ "/Users/user/project/tsconfigs/base.json": `
+ {
+ "compilerOptions": {
+ "verbatimModuleSyntax": true,
+ }
+ }
+ `,
+ "/Users/user/project/tsconfigs/a.json": `
+ {
+ "extends": "./base.json",
+ "compilerOptions": {
+ "jsxFactory": "SUCCESS",
+ }
+ }
+ `,
+ "/Users/user/project/tsconfigs/b.json": `
+ {
+ "extends": "./base.json",
+ "compilerOptions": {
+ "jsxFragmentFactory": "WORKS",
+ }
+ }
+ `,
+ },
+ entryPaths: []string{"/Users/user/project/index.tsx"},
+ options: config.Options{
+ Mode: config.ModePassThrough,
+ AbsOutputFile: "/Users/user/project/out.js",
+ JSX: config.JSXOptions{
+ SideEffects: true,
+ },
+ },
+ })
+}
+
+func TestTsconfigDecoratorsUseDefineForClassFieldsFalse(t *testing.T) {
+ tsconfig_suite.expectBundled(t, bundled{
+ files: map[string]string{
+ "/Users/user/project/src/entry.ts": `
+ class Class {
+ }
+ class ClassMethod {
+ foo() {}
+ }
+ class ClassField {
+ foo = 123
+ bar
+ }
+ class ClassAccessor {
+ accessor foo = 123
+ accessor bar
+ }
+ new Class
+ new ClassMethod
+ new ClassField
+ new ClassAccessor
+ `,
+ "/Users/user/project/src/entrywithdec.ts": `
+ @dec class Class {
+ }
+ class ClassMethod {
+ @dec foo() {}
+ }
+ class ClassField {
+ @dec foo = 123
+ @dec bar
+ }
+ class ClassAccessor {
+ @dec accessor foo = 123
+ @dec accessor bar
+ }
+ new Class
+ new ClassMethod
+ new ClassField
+ new ClassAccessor
+ `,
+ "/Users/user/project/src/tsconfig.json": `{
+ "compilerOptions": {
+ "useDefineForClassFields": false
+ }
+ }`,
+ },
+ entryPaths: []string{
+ "/Users/user/project/src/entry.ts",
+ "/Users/user/project/src/entrywithdec.ts",
+ },
+ options: config.Options{
+ Mode: config.ModeBundle,
+ AbsOutputDir: "/Users/user/project/out",
+ },
+ })
+}
diff --git a/internal/bundler_tests/snapshots/snapshots_css.txt b/internal/bundler_tests/snapshots/snapshots_css.txt
index 330d687fda9..226eb8ca8fa 100644
--- a/internal/bundler_tests/snapshots/snapshots_css.txt
+++ b/internal/bundler_tests/snapshots/snapshots_css.txt
@@ -3443,6 +3443,19 @@ c {
background: url(data:image/png;base64,Yy0z);
}
+================================================================================
+TestResolveExtensionsOrderIssue4053
+---------- /out.js ----------
+// Test.web.tsx
+var Test_web_default = "Test.web.tsx";
+
+// node_modules/expo-image/Image.web.tsx
+var Image_web_default = "Image.web.tsx";
+
+// entry.js
+console.log(Test_web_default === "Test.web.tsx");
+console.log(Image_web_default === "Image.web.tsx");
+
================================================================================
TestTextImportURLInCSSText
---------- /out/entry.css ----------
diff --git a/internal/bundler_tests/snapshots/snapshots_dce.txt b/internal/bundler_tests/snapshots/snapshots_dce.txt
index 89dba531d75..877fac98011 100644
--- a/internal/bundler_tests/snapshots/snapshots_dce.txt
+++ b/internal/bundler_tests/snapshots/snapshots_dce.txt
@@ -126,6 +126,12 @@ var y;
var z;
((z) => (z.x = 1, console.log(1, eval("x"))))(z ||= {});
+---------- /out/issue-4055.js ----------
+const variable = !1;
+(function() {
+ eval("var variable = true"), console.log(variable);
+})();
+
================================================================================
TestConstValueInliningNoBundle
---------- /out/top-level.js ----------
@@ -458,10 +464,7 @@ var A_keep = class {
}
}, D_keep = class {
static {
- try {
- } finally {
- foo;
- }
+ foo;
}
};
@@ -892,6 +895,169 @@ testBreak();
testContinue();
testStmts();
+================================================================================
+TestDeadCodeInsideEmptyTry
+---------- /out.js ----------
+// a.js
+var require_a = __commonJS({
+ "a.js"() {
+ }
+});
+
+// b.js
+var require_b = __commonJS({
+ "b.js"() {
+ }
+});
+
+// d.js
+var require_d = __commonJS({
+ "d.js"() {
+ }
+});
+
+// entry.js
+try {
+ foo();
+} catch {
+ require_a();
+} finally {
+ require_b();
+}
+try {
+} catch {
+} finally {
+ require_d();
+}
+
+================================================================================
+TestDeadCodeInsideUnusedCases
+---------- /out.js ----------
+// a.js
+var require_a = __commonJS({
+ "a.js"() {
+ }
+});
+
+// b.js
+var require_b = __commonJS({
+ "b.js"() {
+ }
+});
+
+// entry.js
+switch (x) {
+ case 0:
+ _ = require_a();
+ break;
+ case 1:
+ _ = require_b();
+ break;
+}
+switch (1) {
+ case 0:
+ _ = null;
+ break;
+ case 1:
+ _ = require_a();
+ break;
+ case 1:
+ _ = null;
+ break;
+ case 2:
+ _ = null;
+ break;
+}
+switch (0) {
+ case 1:
+ _ = null;
+ break;
+ default:
+ _ = require_a();
+ break;
+}
+switch (1) {
+ case 1:
+ _ = require_a();
+ break;
+ default:
+ _ = null;
+ break;
+}
+switch (0) {
+ case 1:
+ _ = null;
+ break;
+ default:
+ _ = null;
+ break;
+ case 0:
+ _ = require_a();
+ break;
+}
+switch (1) {
+ case x:
+ _ = require_a();
+ break;
+ case 1:
+ _ = require_b();
+ break;
+ case x:
+ _ = null;
+ break;
+ default:
+ _ = null;
+ break;
+}
+for (const x2 of y)
+ switch (1) {
+ case 0:
+ _ = null;
+ continue;
+ case 1:
+ _ = require_a();
+ continue;
+ case 2:
+ _ = null;
+ continue;
+ }
+x = () => {
+ switch (1) {
+ case 0:
+ _ = null;
+ return;
+ case 1:
+ _ = require_a();
+ return;
+ case 2:
+ _ = null;
+ return;
+ }
+};
+switch ("b") {
+ case "a":
+ _ = null;
+ case "b":
+ _ = require_a();
+ case "c":
+ _ = require_b();
+ break;
+ case "d":
+ _ = null;
+}
+switch ("b") {
+ case "a":
+ _ = null;
+ case "b":
+ case "c":
+ _ = require_a();
+ case "d":
+ _ = require_b();
+ break;
+ case "e":
+ _ = null;
+}
+
================================================================================
TestDisableTreeShaking
---------- /out.js ----------
diff --git a/internal/bundler_tests/snapshots/snapshots_default.txt b/internal/bundler_tests/snapshots/snapshots_default.txt
index 9465f0e104e..a828a23718c 100644
--- a/internal/bundler_tests/snapshots/snapshots_default.txt
+++ b/internal/bundler_tests/snapshots/snapshots_default.txt
@@ -3154,6 +3154,11 @@ c {
/*! Copyright notice 1 */
/*! Copyright notice 2 */
+---------- /out/empty.js ----------
+
+---------- /out/empty.css ----------
+/* empty.css */
+
================================================================================
TestLegalCommentsEscapeSlashScriptAndStyleEndOfFile
---------- /out/entry.js ----------
@@ -3236,6 +3241,11 @@ c {
/* entry.css */
+---------- /out/empty.js ----------
+
+---------- /out/empty.css ----------
+/* empty.css */
+
================================================================================
TestLegalCommentsInline
---------- /out/entry.js ----------
@@ -3272,6 +3282,11 @@ c {
/* entry.css */
+---------- /out/empty.js ----------
+
+---------- /out/empty.css ----------
+/* empty.css */
+
================================================================================
TestLegalCommentsLinked
---------- /out/entry.js.LEGAL.txt ----------
@@ -3312,6 +3327,11 @@ c {
/* entry.css */
/*! For license information please see entry.css.LEGAL.txt */
+---------- /out/empty.js ----------
+
+---------- /out/empty.css ----------
+/* empty.css */
+
================================================================================
TestLegalCommentsManyEndOfFile
---------- /out/entry.js ----------
@@ -3500,6 +3520,11 @@ c {
/* entry.css */
+---------- /out/empty.js ----------
+
+---------- /out/empty.css ----------
+/* empty.css */
+
================================================================================
TestLineLimitMinified
---------- /out/script.js ----------
@@ -3859,10 +3884,10 @@ TestManglePropsLoweredClassFields
---------- /out.js ----------
class Foo {
constructor() {
- __publicField(this, "a", 123);
+ __publicField(this, /* @__KEY__ */ "a", 123);
}
}
-__publicField(Foo, "b", 234);
+__publicField(Foo, /* @__KEY__ */ "b", 234);
Foo.b = new Foo().a;
================================================================================
@@ -5946,23 +5971,23 @@ console.log("cache:", require.cache);
TestRequireParentDirCommonJS
---------- /out.js ----------
// Users/user/project/src/index.js
-var require_src = __commonJS({
+var require_index = __commonJS({
"Users/user/project/src/index.js"(exports, module) {
module.exports = 123;
}
});
// Users/user/project/src/dir/entry.js
-console.log(require_src());
+console.log(require_index());
================================================================================
TestRequireParentDirES6
---------- /out.js ----------
// Users/user/project/src/index.js
-var src_default = 123;
+var index_default = 123;
// Users/user/project/src/dir/entry.js
-console.log(src_default);
+console.log(index_default);
================================================================================
TestRequirePropertyAccessCommonJS
@@ -6147,6 +6172,85 @@ function fn() {
// entry.js
console.log(fn());
+================================================================================
+TestSourceIdentifierNameIndexMultipleEntry
+---------- /Users/user/project/out/home/index.js ----------
+// Users/user/project/node_modules/pkg/index.js
+var require_pkg = __commonJS({
+ "Users/user/project/node_modules/pkg/index.js"(exports) {
+ exports.pkg = true;
+ }
+});
+
+// Users/user/project/common/index.js
+var require_common = __commonJS({
+ "Users/user/project/common/index.js"(exports) {
+ exports.common = true;
+ }
+});
+
+// Users/user/project/home/index.js
+var require_home = __commonJS({
+ "Users/user/project/home/index.js"() {
+ require_home();
+ require_pkg();
+ require_common();
+ }
+});
+export default require_home();
+
+---------- /Users/user/project/out/about/index.js ----------
+// Users/user/project/node_modules/pkg/index.js
+var require_pkg = __commonJS({
+ "Users/user/project/node_modules/pkg/index.js"(exports) {
+ exports.pkg = true;
+ }
+});
+
+// Users/user/project/common/index.js
+var require_common = __commonJS({
+ "Users/user/project/common/index.js"(exports) {
+ exports.common = true;
+ }
+});
+
+// Users/user/project/about/index.js
+var require_about = __commonJS({
+ "Users/user/project/about/index.js"() {
+ require_about();
+ require_pkg();
+ require_common();
+ }
+});
+export default require_about();
+
+================================================================================
+TestSourceIdentifierNameIndexSingleEntry
+---------- /Users/user/project/out/index.js ----------
+// Users/user/project/node_modules/pkg/index.js
+var require_pkg = __commonJS({
+ "Users/user/project/node_modules/pkg/index.js"(exports) {
+ exports.pkg = true;
+ }
+});
+
+// Users/user/project/nested/index.js
+var require_nested = __commonJS({
+ "Users/user/project/nested/index.js"(exports) {
+ exports.nested = true;
+ }
+});
+
+// Users/user/project/index.js
+var require_index = __commonJS({
+ "Users/user/project/index.js"() {
+ require_index();
+ require_pkg();
+ require_nested();
+ }
+});
+export default require_index();
+
================================================================================
TestSourceMap
---------- /Users/user/project/out.js.map ----------
diff --git a/internal/bundler_tests/snapshots/snapshots_loader.txt b/internal/bundler_tests/snapshots/snapshots_loader.txt
index 38164783a0e..4b1c0db1016 100644
--- a/internal/bundler_tests/snapshots/snapshots_loader.txt
+++ b/internal/bundler_tests/snapshots/snapshots_loader.txt
@@ -12,7 +12,7 @@ console.log(require_test());
================================================================================
TestEmptyLoaderCSS
----------- entry.css.map ----------
+---------- /out.js.map ----------
{
"version": 3,
"sources": ["entry.css"],
@@ -21,7 +21,7 @@ TestEmptyLoaderCSS
"names": []
}
----------- entry.css ----------
+---------- /out.js ----------
/* entry.css */
a {
background: url();
@@ -54,13 +54,13 @@ a {
}
},
"outputs": {
- "entry.css.map": {
+ "out.js.map": {
"imports": [],
"exports": [],
"inputs": {},
"bytes": 203
},
- "entry.css": {
+ "out.js": {
"imports": [
{
"path": "",
@@ -81,7 +81,7 @@ a {
================================================================================
TestEmptyLoaderJS
----------- entry.js.map ----------
+---------- /out.js.map ----------
{
"version": 3,
"sources": ["entry.js"],
@@ -90,7 +90,7 @@ TestEmptyLoaderJS
"names": ["def"]
}
----------- entry.js ----------
+---------- /out.js ----------
// b.empty
var require_b = __commonJS({
"b.empty"() {
@@ -154,13 +154,13 @@ console.log(ns, import_c.default, void 0);
}
},
"outputs": {
- "entry.js.map": {
+ "out.js.map": {
"imports": [],
"exports": [],
"inputs": {},
"bytes": 377
},
- "entry.js": {
+ "out.js": {
"imports": [],
"exports": [],
"entryPoint": "entry.js",
@@ -182,7 +182,7 @@ console.log(ns, import_c.default, void 0);
================================================================================
TestExtensionlessLoaderCSS
----------- entry.css ----------
+---------- /out.js ----------
/* what */
.foo {
color: red;
@@ -192,7 +192,7 @@ TestExtensionlessLoaderCSS
================================================================================
TestExtensionlessLoaderJS
----------- entry.js ----------
+---------- /out.js ----------
// what
foo();
@@ -337,6 +337,80 @@ console.log(x);
body {
background: url("../assets/some.file");
}
+---------- metafile.json ----------
+{
+ "inputs": {
+ "Users/user/project/assets/some.file": {
+ "bytes": 5,
+ "imports": []
+ },
+ "Users/user/project/src/entry.js": {
+ "bytes": 63,
+ "imports": [
+ {
+ "path": "Users/user/project/assets/some.file",
+ "kind": "import-statement",
+ "original": "../assets/some.file"
+ }
+ ],
+ "format": "esm"
+ },
+ "Users/user/project/src/entry.css": {
+ "bytes": 64,
+ "imports": [
+ {
+ "path": "Users/user/project/assets/some.file",
+ "kind": "url-token",
+ "original": "../assets/some.file"
+ }
+ ]
+ }
+ },
+ "outputs": {
+ "out/assets/some.file": {
+ "imports": [],
+ "exports": [],
+ "entryPoint": "Users/user/project/assets/some.file",
+ "inputs": {
+ "Users/user/project/assets/some.file": {
+ "bytesInOutput": 5
+ }
+ },
+ "bytes": 5
+ },
+ "out/src/entry.js": {
+ "imports": [
+ {
+ "path": "out/assets/some.file",
+ "kind": "import-statement"
+ }
+ ],
+ "exports": [],
+ "entryPoint": "Users/user/project/src/entry.js",
+ "inputs": {
+ "Users/user/project/src/entry.js": {
+ "bytesInOutput": 53
+ }
+ },
+ "bytes": 88
+ },
+ "out/src/entry.css": {
+ "imports": [
+ {
+ "path": "out/assets/some.file",
+ "kind": "url-token"
+ }
+ ],
+ "entryPoint": "Users/user/project/src/entry.css",
+ "inputs": {
+ "Users/user/project/src/entry.css": {
+ "bytesInOutput": 51
+ }
+ },
+ "bytes": 90
+ }
+ }
+}
================================================================================
TestLoaderCopyWithBundleFromCSS
@@ -968,6 +1042,18 @@ var y_default = "y";
var x_txt = require_x();
console.log(x_txt, y_default);
+================================================================================
+TestLoaderTextUTF8BOM
+---------- /out.js ----------
+// data1.txt
+var data1_default = "text";
+
+// data2.txt
+var data2_default = "text\uFEFF";
+
+// entry.js
+console.log(data1_default, data2_default);
+
================================================================================
TestRequireCustomExtensionBase64
---------- /out.js ----------
@@ -1029,7 +1115,7 @@ console.log(require_test());
================================================================================
TestWithTypeJSONOverrideLoader
----------- entry.js ----------
+---------- /out.js ----------
// foo.js
var foo_default = { "this is json not js": true };
@@ -1038,7 +1124,7 @@ console.log(foo_default);
================================================================================
TestWithTypeJSONOverrideLoaderGlob
----------- entry.js ----------
+---------- /out.js ----------
// foo.js
var foo_exports = {};
__export(foo_exports, {
diff --git a/internal/bundler_tests/snapshots/snapshots_lower.txt b/internal/bundler_tests/snapshots/snapshots_lower.txt
index 83535f82394..1327f08049a 100644
--- a/internal/bundler_tests/snapshots/snapshots_lower.txt
+++ b/internal/bundler_tests/snapshots/snapshots_lower.txt
@@ -1977,6 +1977,39 @@ var strict_default = class {
// entry.js
console.log(loose_default, strict_default);
+================================================================================
+TestLowerExponentiationOperatorNoBundle
+---------- /out.js ----------
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+let tests = {
+ // Exponentiation operator
+ 0: __pow(a, __pow(b, c)),
+ 1: __pow(__pow(a, b), c),
+ // Exponentiation assignment operator
+ 2: a = __pow(a, b),
+ 3: a.b = __pow(a.b, c),
+ 4: a[b] = __pow(a[b], c),
+ 5: (_a = a()).b = __pow(_a.b, c),
+ 6: (_b = a())[b] = __pow(_b[b], c),
+ 7: a[_c = b()] = __pow(a[_c], c),
+ 8: (_d = a())[_e = b()] = __pow(_d[_e], c),
+ // These all should not need capturing (no object identity)
+ 9: a[0] = __pow(a[0], b),
+ 10: a[false] = __pow(a[false], b),
+ 11: a[null] = __pow(a[null], b),
+ 12: a[void 0] = __pow(a[void 0], b),
+ 13: a[/* @__PURE__ */ BigInt("123")] = __pow(a[/* @__PURE__ */ BigInt("123")], b),
+ 14: a[this] = __pow(a[this], b),
+ // These should need capturing (have object identitiy)
+ 15: a[_f = /x/] = __pow(a[_f], b),
+ 16: a[_g = {}] = __pow(a[_g], b),
+ 17: a[_h = []] = __pow(a[_h], b),
+ 18: a[_i = () => {
+ }] = __pow(a[_i], b),
+ 19: a[_j = function() {
+ }] = __pow(a[_j], b)
+};
+
================================================================================
TestLowerExportStarAsNameCollision
---------- /out.js ----------
diff --git a/internal/bundler_tests/snapshots/snapshots_packagejson.txt b/internal/bundler_tests/snapshots/snapshots_packagejson.txt
index 16a55913d12..d35924ea536 100644
--- a/internal/bundler_tests/snapshots/snapshots_packagejson.txt
+++ b/internal/bundler_tests/snapshots/snapshots_packagejson.txt
@@ -585,6 +585,12 @@ TestPackageJsonExportsDefaultOverImportAndRequire
// Users/user/project/node_modules/pkg/default.js
console.log("SUCCESS");
+================================================================================
+TestPackageJsonExportsDefaultWarningIssue3887
+---------- /out.js ----------
+// node_modules/foo/dist/index.js
+success();
+
================================================================================
TestPackageJsonExportsEntryPointImportOverRequire
---------- /out.js ----------
@@ -677,10 +683,10 @@ TestPackageJsonImportSelfUsingImport
var foo_import_default = "foo";
// Users/user/project/src/index.js
-var src_default = "index";
-console.log(src_default, foo_import_default);
+var index_default = "index";
+console.log(index_default, foo_import_default);
export {
- src_default as default
+ index_default as default
};
================================================================================
@@ -690,10 +696,10 @@ TestPackageJsonImportSelfUsingImportScoped
var foo_import_default = "foo";
// Users/user/project/src/index.js
-var src_default = "index";
-console.log(src_default, foo_import_default);
+var index_default = "index";
+console.log(index_default, foo_import_default);
export {
- src_default as default
+ index_default as default
};
================================================================================
@@ -707,16 +713,16 @@ var require_foo_require = __commonJS({
});
// Users/user/project/src/index.js
-var require_src = __commonJS({
+var require_index = __commonJS({
"Users/user/project/src/index.js"(exports, module) {
module.exports = "index";
console.log(
- require_src(),
+ require_index(),
require_foo_require()
);
}
});
-export default require_src();
+export default require_index();
================================================================================
TestPackageJsonImportSelfUsingRequireScoped
@@ -729,16 +735,16 @@ var require_foo_require = __commonJS({
});
// Users/user/project/src/index.js
-var require_src = __commonJS({
+var require_index = __commonJS({
"Users/user/project/src/index.js"(exports, module) {
module.exports = "index";
console.log(
- require_src(),
+ require_index(),
require_foo_require()
);
}
});
-export default require_src();
+export default require_index();
================================================================================
TestPackageJsonImports
diff --git a/internal/bundler_tests/snapshots/snapshots_splitting.txt b/internal/bundler_tests/snapshots/snapshots_splitting.txt
index 5493fb756b1..071fb909084 100644
--- a/internal/bundler_tests/snapshots/snapshots_splitting.txt
+++ b/internal/bundler_tests/snapshots/snapshots_splitting.txt
@@ -25,23 +25,23 @@ var init_a = __esm({
var B;
var init_b = __esm({
"src/b.js"() {
- B = async () => (await Promise.resolve().then(() => (init_src(), src_exports))).A;
+ B = async () => (await Promise.resolve().then(() => (init_index(), index_exports))).A;
}
});
// src/index.js
-var src_exports = {};
-__export(src_exports, {
+var index_exports = {};
+__export(index_exports, {
A: () => A,
B: () => B
});
-var init_src = __esm({
+var init_index = __esm({
"src/index.js"() {
init_a();
init_b();
}
});
-init_src();
+init_index();
export {
A,
B
diff --git a/internal/bundler_tests/snapshots/snapshots_ts.txt b/internal/bundler_tests/snapshots/snapshots_ts.txt
index 254df136a3b..b0cb8b80876 100644
--- a/internal/bundler_tests/snapshots/snapshots_ts.txt
+++ b/internal/bundler_tests/snapshots/snapshots_ts.txt
@@ -977,7 +977,7 @@ __decorateClass([
], Foo.prototype, "prop1", 2);
__decorateClass([
dec(2)
-], Foo.prototype, "a", 2);
+], Foo.prototype, /* @__KEY__ */ "a", 2);
__decorateClass([
dec(3)
], Foo.prototype, "prop3", 2);
@@ -1008,7 +1008,7 @@ __decorateClass([
], Foo.prototype, "prop1", 2);
__decorateClass([
dec(2)
-], Foo.prototype, "a", 2);
+], Foo.prototype, /* @__KEY__ */ "a", 2);
__decorateClass([
dec(3)
], Foo.prototype, "prop3", 2);
@@ -1045,7 +1045,7 @@ __decorateClass([
], Foo.prototype, "prop1", 1);
__decorateClass([
dec(2)
-], Foo.prototype, "a", 1);
+], Foo.prototype, /* @__KEY__ */ "a", 1);
__decorateClass([
dec(3)
], Foo.prototype, "prop3", 1);
@@ -1088,7 +1088,7 @@ __decorateClass([
], Foo, "prop1", 2);
__decorateClass([
dec(2)
-], Foo, "a", 2);
+], Foo, /* @__KEY__ */ "a", 2);
__decorateClass([
dec(3)
], Foo, "prop3", 2);
@@ -1119,7 +1119,7 @@ __decorateClass([
], Foo, "prop1", 2);
__decorateClass([
dec(2)
-], Foo, "a", 2);
+], Foo, /* @__KEY__ */ "a", 2);
__decorateClass([
dec(3)
], Foo, "prop3", 2);
@@ -1156,7 +1156,7 @@ __decorateClass([
], Foo, "prop1", 1);
__decorateClass([
dec(2)
-], Foo, "a", 1);
+], Foo, /* @__KEY__ */ "a", 1);
__decorateClass([
dec(3)
], Foo, "prop3", 1);
diff --git a/internal/bundler_tests/snapshots/snapshots_tsconfig.txt b/internal/bundler_tests/snapshots/snapshots_tsconfig.txt
index 1e05b86c4f0..a83bf1a74d4 100644
--- a/internal/bundler_tests/snapshots/snapshots_tsconfig.txt
+++ b/internal/bundler_tests/snapshots/snapshots_tsconfig.txt
@@ -82,6 +82,82 @@ var foo = 123;
// Users/user/project/src/entry.ts
console.log(foo);
+================================================================================
+TestTsconfigDecoratorsUseDefineForClassFieldsFalse
+---------- /Users/user/project/out/entry.js ----------
+// Users/user/project/src/entry.ts
+var Class = class {
+};
+var ClassMethod = class {
+ foo() {
+ }
+};
+var ClassField = class {
+ constructor() {
+ this.foo = 123;
+ }
+};
+var ClassAccessor = class {
+ accessor foo = 123;
+ accessor bar;
+};
+new Class();
+new ClassMethod();
+new ClassField();
+new ClassAccessor();
+
+---------- /Users/user/project/out/entrywithdec.js ----------
+// Users/user/project/src/entrywithdec.ts
+var _Class_decorators, _init;
+_Class_decorators = [dec];
+var Class = class {
+};
+_init = __decoratorStart(null);
+Class = __decorateElement(_init, 0, "Class", _Class_decorators, Class);
+__runInitializers(_init, 1, Class);
+var _foo_dec, _init2;
+_foo_dec = [dec];
+var ClassMethod = class {
+ constructor() {
+ __runInitializers(_init2, 5, this);
+ }
+ foo() {
+ }
+};
+_init2 = __decoratorStart(null);
+__decorateElement(_init2, 1, "foo", _foo_dec, ClassMethod);
+__decoratorMetadata(_init2, ClassMethod);
+var _bar_dec, _foo_dec2, _init3;
+_foo_dec2 = [dec], _bar_dec = [dec];
+var ClassField = class {
+ constructor() {
+ this.foo = __runInitializers(_init3, 8, this, 123), __runInitializers(_init3, 11, this);
+ this.bar = __runInitializers(_init3, 12, this), __runInitializers(_init3, 15, this);
+ }
+};
+_init3 = __decoratorStart(null);
+__decorateElement(_init3, 5, "foo", _foo_dec2, ClassField);
+__decorateElement(_init3, 5, "bar", _bar_dec, ClassField);
+__decoratorMetadata(_init3, ClassField);
+var _bar_dec2, _foo_dec3, _init4, _foo, _bar;
+_foo_dec3 = [dec], _bar_dec2 = [dec];
+var ClassAccessor = class {
+ constructor() {
+ __privateAdd(this, _foo, __runInitializers(_init4, 8, this, 123)), __runInitializers(_init4, 11, this);
+ __privateAdd(this, _bar, __runInitializers(_init4, 12, this)), __runInitializers(_init4, 15, this);
+ }
+};
+_init4 = __decoratorStart(null);
+_foo = new WeakMap();
+_bar = new WeakMap();
+__decorateElement(_init4, 4, "foo", _foo_dec3, ClassAccessor, _foo);
+__decorateElement(_init4, 4, "bar", _bar_dec2, ClassAccessor, _bar);
+__decoratorMetadata(_init4, ClassAccessor);
+new Class();
+new ClassMethod();
+new ClassField();
+new ClassAccessor();
+
================================================================================
TestTsconfigExtendsArray
---------- /Users/user/project/out/main.js ----------
@@ -290,6 +366,12 @@ TestTsconfigJsonExtendsAbsolute
// Users/user/project/entry.jsx
console.log(/* @__PURE__ */ baseFactory("div", null), /* @__PURE__ */ baseFactory(derivedFragment, null));
+================================================================================
+TestTsconfigJsonExtendsArrayIssue3898
+---------- /Users/user/project/out.js ----------
+import {} from "MUST_KEEP";
+console.log(SUCCESS(WORKS, null, SUCCESS("div", null)));
+
================================================================================
TestTsconfigJsonExtendsLoop
---------- /out.js ----------
@@ -724,6 +806,12 @@ TestTsconfigRemoveUnusedImports
// Users/user/project/src/entry.ts
console.log(1);
+================================================================================
+TestTsconfigStackOverflowYarnPnP
+---------- /Users/user/project/out.js ----------
+// entry.jsx
+console.log(/* @__PURE__ */ success("div", null));
+
================================================================================
TestTsconfigUnrecognizedTargetWarning
---------- /Users/user/project/out.js ----------
diff --git a/internal/compat/css_table.go b/internal/compat/css_table.go
index 1cd717e0a54..0ee1001ac0e 100644
--- a/internal/compat/css_table.go
+++ b/internal/compat/css_table.go
@@ -198,13 +198,12 @@ var cssPrefixTable = map[css_ast.D][]prefixData{
{engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
{engine: Edge, prefix: MsPrefix, withoutPrefix: v{15, 0, 0}},
{engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}},
- {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}},
{engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}},
- {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}},
+ {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{5, 0, 0}},
},
css_ast.DBoxDecorationBreak: {
- {engine: Chrome, prefix: WebkitPrefix},
- {engine: Edge, prefix: WebkitPrefix},
+ {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}},
+ {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}},
{engine: IOS, prefix: WebkitPrefix},
{engine: Opera, prefix: WebkitPrefix},
{engine: Safari, prefix: WebkitPrefix},
diff --git a/internal/compat/js_table.go b/internal/compat/js_table.go
index f3461ab1d9f..18f2bbe1c6d 100644
--- a/internal/compat/js_table.go
+++ b/internal/compat/js_table.go
@@ -813,15 +813,16 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{
Safari: {{start: v{12, 0, 0}}},
},
RegexpUnicodePropertyEscapes: {
- // Note: The latest version of "Chrome" failed this test: RegExp Unicode Property Escapes: Unicode 15.1
- // Note: The latest version of "Firefox" failed this test: RegExp Unicode Property Escapes: Unicode 15.1
+ // Note: The latest version of "Chrome" failed this test: RegExp Unicode Property Escapes: Unicode 16.0
+ // Note: The latest version of "Edge" failed this test: RegExp Unicode Property Escapes: Unicode 16.0
// Note: The latest version of "Hermes" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11
// Note: The latest version of "IE" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11
- // Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 15.1
+ // Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 16.0
// Note: The latest version of "Rhino" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11
- // Note: The latest version of "Safari" failed this test: RegExp Unicode Property Escapes: Unicode 15.1
- ES: {{start: v{2018, 0, 0}}},
- Node: {{start: v{18, 20, 0}, end: v{19, 0, 0}}, {start: v{20, 12, 0}, end: v{21, 0, 0}}, {start: v{21, 3, 0}}},
+ // Note: The latest version of "Safari" failed this test: RegExp Unicode Property Escapes: Unicode 16.0
+ ES: {{start: v{2018, 0, 0}}},
+ Firefox: {{start: v{134, 0, 0}}},
+ Node: {{start: v{22, 12, 0}, end: v{23, 0, 0}}, {start: v{23, 3, 0}}},
},
RestArgument: {
// Note: The latest version of "Hermes" failed this test: rest parameters: function 'length' property
diff --git a/internal/config/config.go b/internal/config/config.go
index 615d6882eaa..ebcb170356c 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -256,6 +256,30 @@ func (loader Loader) CanHaveSourceMap() bool {
return false
}
+func LoaderFromFileExtension(extensionToLoader map[string]Loader, base string) Loader {
+ // Pick the loader with the longest matching extension. So if there's an
+ // extension for ".css" and for ".module.css", we want to match the one for
+ // ".module.css" before the one for ".css".
+ if i := strings.IndexByte(base, '.'); i != -1 {
+ for {
+ if loader, ok := extensionToLoader[base[i:]]; ok {
+ return loader
+ }
+ base = base[i+1:]
+ i = strings.IndexByte(base, '.')
+ if i == -1 {
+ break
+ }
+ }
+ } else {
+ // If there's no extension, explicitly check for an extensionless loader
+ if loader, ok := extensionToLoader[""]; ok {
+ return loader
+ }
+ }
+ return LoaderNone
+}
+
type Format uint8
const (
diff --git a/internal/config/globals.go b/internal/config/globals.go
index 4a77c0267c0..1bbc9786f8c 100644
--- a/internal/config/globals.go
+++ b/internal/config/globals.go
@@ -2,7 +2,6 @@ package config
import (
"math"
- "strings"
"sync"
"github.com/evanw/esbuild/internal/ast"
@@ -868,6 +867,7 @@ type DefineExpr struct {
}
type DefineData struct {
+ KeyParts []string
DefineExpr *DefineExpr
Flags DefineFlags
}
@@ -905,14 +905,9 @@ func mergeDefineData(old DefineData, new DefineData) DefineData {
return new
}
-type DotDefine struct {
- Data DefineData
- Parts []string
-}
-
type ProcessedDefines struct {
IdentifierDefines map[string]DefineData
- DotDefines map[string][]DotDefine
+ DotDefines map[string][]DefineData
}
// This transformation is expensive, so we only want to do it once. Make sure
@@ -920,7 +915,7 @@ type ProcessedDefines struct {
// doesn't have an efficient way to copy a map and the overhead of copying
// all of the properties into a new map once for every new parser noticeably
// slows down our benchmarks.
-func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
+func ProcessDefines(userDefines []DefineData) ProcessedDefines {
// Optimization: reuse known globals if there are no user-specified defines
hasUserDefines := len(userDefines) != 0
if !hasUserDefines {
@@ -934,7 +929,7 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
result := ProcessedDefines{
IdentifierDefines: make(map[string]DefineData),
- DotDefines: make(map[string][]DotDefine),
+ DotDefines: make(map[string][]DefineData),
}
// Mark these property accesses as free of side effects. That means they can
@@ -956,7 +951,7 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
flags |= IsSymbolInstance
}
- result.DotDefines[tail] = append(result.DotDefines[tail], DotDefine{Parts: parts, Data: DefineData{Flags: flags}})
+ result.DotDefines[tail] = append(result.DotDefines[tail], DefineData{KeyParts: parts, Flags: flags})
}
}
@@ -973,31 +968,29 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines {
// Then copy the user-specified defines in afterwards, which will overwrite
// any known globals above.
- for key, data := range userDefines {
- parts := strings.Split(key, ".")
-
+ for _, data := range userDefines {
// Identifier defines are special-cased
- if len(parts) == 1 {
- result.IdentifierDefines[key] = mergeDefineData(result.IdentifierDefines[key], data)
+ if len(data.KeyParts) == 1 {
+ name := data.KeyParts[0]
+ result.IdentifierDefines[name] = mergeDefineData(result.IdentifierDefines[name], data)
continue
}
- tail := parts[len(parts)-1]
+ tail := data.KeyParts[len(data.KeyParts)-1]
dotDefines := result.DotDefines[tail]
found := false
// Try to merge with existing dot defines first
for i, define := range dotDefines {
- if helpers.StringArraysEqual(parts, define.Parts) {
- define := &dotDefines[i]
- define.Data = mergeDefineData(define.Data, data)
+ if helpers.StringArraysEqual(data.KeyParts, define.KeyParts) {
+ dotDefines[i] = mergeDefineData(dotDefines[i], data)
found = true
break
}
}
if !found {
- dotDefines = append(dotDefines, DotDefine{Parts: parts, Data: data})
+ dotDefines = append(dotDefines, data)
}
result.DotDefines[tail] = dotDefines
}
diff --git a/internal/css_ast/css_ast.go b/internal/css_ast/css_ast.go
index b7a17316210..69d02e3f37d 100644
--- a/internal/css_ast/css_ast.go
+++ b/internal/css_ast/css_ast.go
@@ -788,34 +788,40 @@ func HashComplexSelectors(hash uint32, selectors []ComplexSelector) uint32 {
return hash
}
-func (s ComplexSelector) CloneWithoutLeadingCombinator() ComplexSelector {
+func (s ComplexSelector) Clone() ComplexSelector {
clone := ComplexSelector{Selectors: make([]CompoundSelector, len(s.Selectors))}
for i, sel := range s.Selectors {
- if i == 0 {
- sel.Combinator = Combinator{}
- }
clone.Selectors[i] = sel.Clone()
}
return clone
}
-func (sel ComplexSelector) IsRelative() bool {
- if sel.Selectors[0].Combinator.Byte == 0 {
- for _, inner := range sel.Selectors {
- if inner.HasNestingSelector() {
- return false
- }
- for _, ss := range inner.SubclassSelectors {
- if pseudo, ok := ss.Data.(*SSPseudoClassWithSelectorList); ok {
- for _, nested := range pseudo.Selectors {
- if !nested.IsRelative() {
- return false
- }
+func (sel ComplexSelector) ContainsNestingCombinator() bool {
+ for _, inner := range sel.Selectors {
+ if len(inner.NestingSelectorLocs) > 0 {
+ return true
+ }
+ for _, ss := range inner.SubclassSelectors {
+ if pseudo, ok := ss.Data.(*SSPseudoClassWithSelectorList); ok {
+ for _, nested := range pseudo.Selectors {
+ if nested.ContainsNestingCombinator() {
+ return true
}
}
}
}
}
+ return false
+}
+
+func (sel ComplexSelector) IsRelative() bool {
+ // https://www.w3.org/TR/css-nesting-1/#syntax
+ // "If a selector in the does not start with a
+ // combinator but does contain the nesting selector, it is interpreted
+ // as a non-relative selector."
+ if sel.Selectors[0].Combinator.Byte == 0 && sel.ContainsNestingCombinator() {
+ return false
+ }
return true
}
@@ -861,7 +867,7 @@ func (a ComplexSelector) Equal(b ComplexSelector, check *CrossFileEqualityCheck)
for i, ai := range a.Selectors {
bi := b.Selectors[i]
- if ai.HasNestingSelector() != bi.HasNestingSelector() || ai.Combinator.Byte != bi.Combinator.Byte {
+ if len(ai.NestingSelectorLocs) != len(bi.NestingSelectorLocs) || ai.Combinator.Byte != bi.Combinator.Byte {
return false
}
@@ -890,25 +896,21 @@ type Combinator struct {
}
type CompoundSelector struct {
- TypeSelector *NamespacedName
- SubclassSelectors []SubclassSelector
- NestingSelectorLoc ast.Index32 // "&"
- Combinator Combinator // Optional, may be 0
+ TypeSelector *NamespacedName
+ SubclassSelectors []SubclassSelector
+ NestingSelectorLocs []logger.Loc // "&" vs. "&&" is different specificity
+ Combinator Combinator // Optional, may be 0
// If this is true, this is a "&" that was generated by a bare ":local" or ":global"
WasEmptyFromLocalOrGlobal bool
}
-func (sel *CompoundSelector) HasNestingSelector() bool {
- return sel.NestingSelectorLoc.IsValid()
-}
-
func (sel CompoundSelector) IsSingleAmpersand() bool {
- return sel.HasNestingSelector() && sel.Combinator.Byte == 0 && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0
+ return len(sel.NestingSelectorLocs) == 1 && sel.Combinator.Byte == 0 && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0
}
func (sel CompoundSelector) IsInvalidBecauseEmpty() bool {
- return !sel.HasNestingSelector() && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0
+ return len(sel.NestingSelectorLocs) == 0 && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0
}
func (sel CompoundSelector) Range() (r logger.Range) {
@@ -918,8 +920,8 @@ func (sel CompoundSelector) Range() (r logger.Range) {
if sel.TypeSelector != nil {
r.ExpandBy(sel.TypeSelector.Range())
}
- if sel.NestingSelectorLoc.IsValid() {
- r.ExpandBy(logger.Range{Loc: logger.Loc{Start: int32(sel.NestingSelectorLoc.GetIndex())}, Len: 1})
+ for _, loc := range sel.NestingSelectorLocs {
+ r.ExpandBy(logger.Range{Loc: loc, Len: 1})
}
if len(sel.SubclassSelectors) > 0 {
for _, ss := range sel.SubclassSelectors {
@@ -1199,7 +1201,7 @@ func (ss *SSPseudoClassWithSelectorList) Clone() SS {
clone := *ss
clone.Selectors = make([]ComplexSelector, len(ss.Selectors))
for i, sel := range ss.Selectors {
- clone.Selectors[i] = sel.CloneWithoutLeadingCombinator()
+ clone.Selectors[i] = sel.Clone()
}
return &clone
}
diff --git a/internal/css_parser/css_nesting.go b/internal/css_parser/css_nesting.go
index a95da135956..f2991f19636 100644
--- a/internal/css_parser/css_nesting.go
+++ b/internal/css_parser/css_nesting.go
@@ -3,7 +3,6 @@ package css_parser
import (
"fmt"
- "github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/logger"
@@ -118,76 +117,13 @@ func (p *parser) lowerNestingInRuleWithContext(rule css_ast.Rule, context *lower
// "a, b { &.c, & d, e & {} }" => ":is(a, b).c, :is(a, b) d, e :is(a, b) {}"
// Pass 1: Canonicalize and analyze our selectors
- canUseGroupDescendantCombinator := true // Can we do "parent «space» :is(...selectors)"?
- canUseGroupSubSelector := true // Can we do "parent«nospace»:is(...selectors)"?
- var commonLeadingCombinator css_ast.Combinator
for i := range r.Selectors {
sel := &r.Selectors[i]
// Inject the implicit "&" now for simplicity later on
if sel.IsRelative() {
- sel.Selectors = append([]css_ast.CompoundSelector{{NestingSelectorLoc: ast.MakeIndex32(uint32(rule.Loc.Start))}}, sel.Selectors...)
+ sel.Selectors = append([]css_ast.CompoundSelector{{NestingSelectorLocs: []logger.Loc{rule.Loc}}}, sel.Selectors...)
}
-
- // Pseudo-elements aren't supported by ":is" (i.e. ":is(div, div::before)"
- // is the same as ":is(div)") so we need to avoid generating ":is" if a
- // pseudo-element is present.
- if sel.UsesPseudoElement() {
- canUseGroupDescendantCombinator = false
- canUseGroupSubSelector = false
- }
-
- // Are all children of the form "& «something»"?
- if len(sel.Selectors) < 2 || !sel.Selectors[0].IsSingleAmpersand() {
- canUseGroupDescendantCombinator = false
- } else {
- // If all children are of the form "& «COMBINATOR» «something»", is «COMBINATOR» the same in all cases?
- var combinator css_ast.Combinator
- if len(sel.Selectors) >= 2 {
- combinator = sel.Selectors[1].Combinator
- }
- if i == 0 {
- commonLeadingCombinator = combinator
- } else if commonLeadingCombinator.Byte != combinator.Byte {
- canUseGroupDescendantCombinator = false
- }
- }
-
- // Are all children of the form "&«something»"?
- if first := sel.Selectors[0]; !first.HasNestingSelector() || first.IsSingleAmpersand() {
- canUseGroupSubSelector = false
- }
- }
-
- // Avoid generating ":is" if it's not supported
- if p.options.unsupportedCSSFeatures.Has(compat.IsPseudoClass) && len(r.Selectors) > 1 {
- canUseGroupDescendantCombinator = false
- canUseGroupSubSelector = false
- }
-
- // Try to apply simplifications for shorter output
- if canUseGroupDescendantCombinator {
- // "& a, & b {}" => "& :is(a, b) {}"
- // "& > a, & > b {}" => "& > :is(a, b) {}"
- nestingSelectorLoc := r.Selectors[0].Selectors[0].NestingSelectorLoc
- for i := range r.Selectors {
- sel := &r.Selectors[i]
- sel.Selectors = sel.Selectors[1:]
- }
- merged := p.multipleComplexSelectorsToSingleComplexSelector(r.Selectors)(rule.Loc)
- merged.Selectors = append([]css_ast.CompoundSelector{{NestingSelectorLoc: nestingSelectorLoc}}, merged.Selectors...)
- r.Selectors = []css_ast.ComplexSelector{merged}
- } else if canUseGroupSubSelector {
- // "&a, &b {}" => "&:is(a, b) {}"
- // "> &a, > &b {}" => "> &:is(a, b) {}"
- nestingSelectorLoc := r.Selectors[0].Selectors[0].NestingSelectorLoc
- for i := range r.Selectors {
- sel := &r.Selectors[i]
- sel.Selectors[0].NestingSelectorLoc = ast.Index32{}
- }
- merged := p.multipleComplexSelectorsToSingleComplexSelector(r.Selectors)(rule.Loc)
- merged.Selectors[0].NestingSelectorLoc = nestingSelectorLoc
- r.Selectors = []css_ast.ComplexSelector{merged}
}
// Pass 2: Substitute "&" for the parent selector
@@ -351,9 +287,7 @@ func (p *parser) substituteAmpersandsInCompoundSelector(
results []css_ast.CompoundSelector,
strip leadingCombinatorStrip,
) []css_ast.CompoundSelector {
- if sel.HasNestingSelector() {
- nestingSelectorLoc := logger.Loc{Start: int32(sel.NestingSelectorLoc.GetIndex())}
- sel.NestingSelectorLoc = ast.Index32{}
+ for _, nestingSelectorLoc := range sel.NestingSelectorLocs {
replacement := replacementFn(nestingSelectorLoc)
// Convert the replacement to a single compound selector
@@ -383,7 +317,7 @@ func (p *parser) substituteAmpersandsInCompoundSelector(
Range: logger.Range{Loc: nestingSelectorLoc},
Data: &css_ast.SSPseudoClassWithSelectorList{
Kind: css_ast.PseudoClassIs,
- Selectors: []css_ast.ComplexSelector{replacement.CloneWithoutLeadingCombinator()},
+ Selectors: []css_ast.ComplexSelector{replacement.Clone()},
},
}},
}
@@ -414,6 +348,7 @@ func (p *parser) substituteAmpersandsInCompoundSelector(
sel.SubclassSelectors = append(subclassSelectorPrefix, sel.SubclassSelectors...)
}
}
+ sel.NestingSelectorLocs = nil
// "div { :is(&.foo) {} }" => ":is(div.foo) {}"
for _, ss := range sel.SubclassSelectors {
@@ -449,7 +384,7 @@ func (p *parser) multipleComplexSelectorsToSingleComplexSelector(selectors []css
for i, sel := range selectors {
// "> a, > b" => "> :is(a, b)" (the caller should have already checked that all leading combinators are the same)
leadingCombinator = sel.Selectors[0].Combinator
- clones[i] = sel.CloneWithoutLeadingCombinator()
+ clones[i] = sel.Clone()
}
return func(loc logger.Loc) css_ast.ComplexSelector {
diff --git a/internal/css_parser/css_parser.go b/internal/css_parser/css_parser.go
index 131ec5edfcd..718c53ce184 100644
--- a/internal/css_parser/css_parser.go
+++ b/internal/css_parser/css_parser.go
@@ -898,7 +898,7 @@ var nonDeprecatedElementsSupportedByIE7 = map[string]bool{
func isSafeSelectors(complexSelectors []css_ast.ComplexSelector) bool {
for _, complex := range complexSelectors {
for _, compound := range complex.Selectors {
- if compound.HasNestingSelector() {
+ if len(compound.NestingSelectorLocs) > 0 {
// Bail because this is an extension: https://drafts.csswg.org/css-nesting-1/
return false
}
@@ -2088,8 +2088,8 @@ func (p *parser) parseSelectorRule(isTopLevel bool, opts parseSelectorOpts) css_
composesContext.problemRange = logger.Range{Loc: first.Combinator.Loc, Len: 1}
} else if first.TypeSelector != nil {
composesContext.problemRange = first.TypeSelector.Range()
- } else if first.NestingSelectorLoc.IsValid() {
- composesContext.problemRange = logger.Range{Loc: logger.Loc{Start: int32(first.NestingSelectorLoc.GetIndex())}, Len: 1}
+ } else if len(first.NestingSelectorLocs) > 0 {
+ composesContext.problemRange = logger.Range{Loc: first.NestingSelectorLocs[0], Len: 1}
} else {
for i, ss := range first.SubclassSelectors {
class, ok := ss.Data.(*css_ast.SSClass)
diff --git a/internal/css_parser/css_parser_selector.go b/internal/css_parser/css_parser_selector.go
index d766e8ec3f8..819086f5d2b 100644
--- a/internal/css_parser/css_parser_selector.go
+++ b/internal/css_parser/css_parser_selector.go
@@ -4,7 +4,6 @@ import (
"fmt"
"strings"
- "github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_lexer"
"github.com/evanw/esbuild/internal/logger"
@@ -69,7 +68,18 @@ func (p *parser) parseSelectorList(opts parseSelectorOpts) (list []css_ast.Compl
}
}
- if p.options.minifySyntax {
+ // Remove the leading ampersand when minifying and it can be implied:
+ //
+ // "a { & b {} }" => "a { b {} }"
+ //
+ // It can't be implied if it's not at the beginning, if there are multiple of
+ // them, or if the selector list is inside of a pseudo-class selector:
+ //
+ // "a { b & {} }"
+ // "a { & b & {} }"
+ // "a { :has(& b) {} }"
+ //
+ if p.options.minifySyntax && !opts.stopOnCloseParen {
for i := 1; i < len(list); i++ {
if analyzeLeadingAmpersand(list[i], opts.isDeclarationContext) != cannotRemoveLeadingAmpersand {
list[i].Selectors = list[i].Selectors[1:]
@@ -82,7 +92,7 @@ func (p *parser) parseSelectorList(opts parseSelectorOpts) (list []css_ast.Compl
case canRemoveLeadingAmpersandIfNotFirst:
for i := 1; i < len(list); i++ {
- if sel := list[i].Selectors[0]; !sel.HasNestingSelector() && (sel.Combinator.Byte != 0 || sel.TypeSelector == nil) {
+ if sel := list[i].Selectors[0]; len(sel.NestingSelectorLocs) == 0 && (sel.Combinator.Byte != 0 || sel.TypeSelector == nil) {
list[0].Selectors = list[0].Selectors[1:]
list[0], list[i] = list[i], list[0]
break
@@ -97,8 +107,8 @@ func (p *parser) parseSelectorList(opts parseSelectorOpts) (list []css_ast.Compl
func mergeCompoundSelectors(target *css_ast.CompoundSelector, source css_ast.CompoundSelector) {
// ".foo:local(&)" => "&.foo"
- if source.HasNestingSelector() && !target.HasNestingSelector() {
- target.NestingSelectorLoc = source.NestingSelectorLoc
+ if len(source.NestingSelectorLocs) > 0 && len(target.NestingSelectorLocs) == 0 {
+ target.NestingSelectorLocs = source.NestingSelectorLocs
}
if source.TypeSelector != nil {
@@ -210,7 +220,7 @@ func (p *parser) flattenLocalAndGlobalSelectors(list []css_ast.ComplexSelector,
if len(selectors) == 0 {
// Treat a bare ":global" or ":local" as a bare "&" nesting selector
selectors = append(selectors, css_ast.CompoundSelector{
- NestingSelectorLoc: ast.MakeIndex32(uint32(sel.Selectors[0].Range().Loc.Start)),
+ NestingSelectorLocs: []logger.Loc{sel.Selectors[0].Range().Loc},
WasEmptyFromLocalOrGlobal: true,
})
@@ -235,7 +245,7 @@ const (
func analyzeLeadingAmpersand(sel css_ast.ComplexSelector, isDeclarationContext bool) leadingAmpersand {
if len(sel.Selectors) > 1 {
if first := sel.Selectors[0]; first.IsSingleAmpersand() {
- if second := sel.Selectors[1]; second.Combinator.Byte == 0 && second.HasNestingSelector() {
+ if second := sel.Selectors[1]; second.Combinator.Byte == 0 && len(second.NestingSelectorLocs) > 0 {
// ".foo { & &.bar {} }" => ".foo { & &.bar {} }"
} else if second.Combinator.Byte != 0 || second.TypeSelector == nil || !isDeclarationContext {
// "& + div {}" => "+ div {}"
@@ -330,7 +340,7 @@ func (p *parser) parseCompoundSelector(opts parseComplexSelectorOpts) (sel css_a
hasLeadingNestingSelector := p.peek(css_lexer.TDelimAmpersand)
if hasLeadingNestingSelector {
p.nestingIsPresent = true
- sel.NestingSelectorLoc = ast.MakeIndex32(uint32(startLoc.Start))
+ sel.NestingSelectorLocs = append(sel.NestingSelectorLocs, startLoc)
p.advance()
}
@@ -445,7 +455,7 @@ subclassSelectors:
case css_lexer.TDelimAmpersand:
// This is an extension: https://drafts.csswg.org/css-nesting-1/
p.nestingIsPresent = true
- sel.NestingSelectorLoc = ast.MakeIndex32(uint32(subclassToken.Range.Loc.Start))
+ sel.NestingSelectorLocs = append(sel.NestingSelectorLocs, subclassToken.Range.Loc)
p.advance()
default:
diff --git a/internal/css_parser/css_parser_test.go b/internal/css_parser/css_parser_test.go
index c4daa21fd9e..3229b4daea0 100644
--- a/internal/css_parser/css_parser_test.go
+++ b/internal/css_parser/css_parser_test.go
@@ -1049,7 +1049,7 @@ func TestNestedSelector(t *testing.T) {
expectPrinted(t, "a { &a|b {} }", "a {\n &a|b {\n }\n}\n", ": WARNING: Cannot use type selector \"a|b\" directly after nesting selector \"&\"\n"+sassWarningWrap)
expectPrinted(t, "a { &[b] {} }", "a {\n &[b] {\n }\n}\n", "")
- expectPrinted(t, "a { && {} }", "a {\n & {\n }\n}\n", "")
+ expectPrinted(t, "a { && {} }", "a {\n && {\n }\n}\n", "")
expectPrinted(t, "a { & + & {} }", "a {\n & + & {\n }\n}\n", "")
expectPrinted(t, "a { & > & {} }", "a {\n & > & {\n }\n}\n", "")
expectPrinted(t, "a { & ~ & {} }", "a {\n & ~ & {\n }\n}\n", "")
@@ -1127,6 +1127,8 @@ func TestNestedSelector(t *testing.T) {
expectPrintedMangle(t, "div { .x & { color: red } }", "div {\n .x & {\n color: red;\n }\n}\n", "")
expectPrintedMangle(t, "@media screen { & div { color: red } }", "@media screen {\n div {\n color: red;\n }\n}\n", "")
expectPrintedMangle(t, "a { @media screen { & div { color: red } } }", "a {\n @media screen {\n & div {\n color: red;\n }\n }\n}\n", "")
+ expectPrintedMangle(t, "a { :has(& b) { color: red } }", "a {\n :has(& b) {\n color: red;\n }\n}\n", "")
+ expectPrintedMangle(t, "a { :has(& + b) { color: red } }", "a {\n :has(& + b) {\n color: red;\n }\n}\n", "")
// Reorder selectors to enable removing "&"
expectPrintedMangle(t, "reorder { & first, .second { color: red } }", "reorder {\n .second,\n first {\n color: red;\n }\n}\n", "")
@@ -1141,12 +1143,13 @@ func TestNestedSelector(t *testing.T) {
// Inline no-op nesting
expectPrintedMangle(t, "div { & { color: red } }", "div {\n color: red;\n}\n", "")
- expectPrintedMangle(t, "div { && { color: red } }", "div {\n color: red;\n}\n", "")
+ expectPrintedMangle(t, "div { && { color: red } }", "div {\n && {\n color: red;\n }\n}\n", "")
expectPrintedMangle(t, "div { zoom: 2; & { color: red } }", "div {\n zoom: 2;\n color: red;\n}\n", "")
- expectPrintedMangle(t, "div { zoom: 2; && { color: red } }", "div {\n zoom: 2;\n color: red;\n}\n", "")
- expectPrintedMangle(t, "div { &, && { color: red } zoom: 2 }", "div {\n zoom: 2;\n color: red;\n}\n", "")
- expectPrintedMangle(t, "div { &&, & { color: red } zoom: 2 }", "div {\n zoom: 2;\n color: red;\n}\n", "")
- expectPrintedMangle(t, "div { a: 1; & { b: 4 } b: 2; && { c: 5 } c: 3 }", "div {\n a: 1;\n b: 2;\n c: 3;\n b: 4;\n c: 5;\n}\n", "")
+ expectPrintedMangle(t, "div { zoom: 2; && { color: red } }", "div {\n zoom: 2;\n && {\n color: red;\n }\n}\n", "")
+ expectPrintedMangle(t, "div { &, & { color: red } zoom: 2 }", "div {\n zoom: 2;\n color: red;\n}\n", "")
+ expectPrintedMangle(t, "div { &, && { color: red } zoom: 2 }", "div {\n &,\n && {\n color: red;\n }\n zoom: 2;\n}\n", "")
+ expectPrintedMangle(t, "div { &&, & { color: red } zoom: 2 }", "div {\n &&,\n & {\n color: red;\n }\n zoom: 2;\n}\n", "")
+ expectPrintedMangle(t, "div { a: 1; & { b: 4 } b: 2; && { c: 5 } c: 3 }", "div {\n a: 1;\n b: 2;\n && {\n c: 5;\n }\n c: 3;\n b: 4;\n}\n", "")
expectPrintedMangle(t, "div { .b { x: 1 } & { x: 2 } }", "div {\n .b {\n x: 1;\n }\n x: 2;\n}\n", "")
expectPrintedMangle(t, "div { & { & { & { color: red } } & { & { zoom: 2 } } } }", "div {\n color: red;\n zoom: 2;\n}\n", "")
@@ -1186,22 +1189,22 @@ func TestNestedSelector(t *testing.T) {
expectPrintedLowerUnsupported(t, everything, ".foo, [bar~='abc'] { .baz { color: red } }", ".foo .baz,\n[bar~=abc] .baz {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".foo, [bar~='a b c'] { .baz { color: red } }", ":is(.foo, [bar~=\"a b c\"]) .baz {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".foo, [bar~='a b c'] { .baz { color: red } }", ".foo .baz,\n[bar~=\"a b c\"] .baz {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".baz { .foo, .bar { color: red } }", ".baz :is(.foo, .bar) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".baz { .foo, .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { .foo, .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".baz { .foo, & .bar { color: red } }", ".baz :is(.foo, .bar) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".baz { .foo, & .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { .foo, & .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".baz { & .foo, .bar { color: red } }", ".baz :is(.foo, .bar) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".baz { & .foo, .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { & .foo, .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".baz { & .foo, & .bar { color: red } }", ".baz :is(.foo, .bar) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".baz { & .foo, & .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { & .foo, & .bar { color: red } }", ".baz .foo,\n.baz .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".baz { .foo, &.bar { color: red } }", ".baz .foo,\n.baz.bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { &.foo, .bar { color: red } }", ".baz.foo,\n.baz .bar {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".baz { &.foo, &.bar { color: red } }", ".baz:is(.foo, .bar) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".baz { &.foo, &.bar { color: red } }", ".baz.foo,\n.baz.bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".baz { &.foo, &.bar { color: red } }", ".baz.foo,\n.baz.bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".foo { color: blue; & .bar { color: red } }", ".foo {\n color: blue;\n}\n.foo .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".foo { & .bar { color: red } color: blue }", ".foo {\n color: blue;\n}\n.foo .bar {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".foo { color: blue; & .bar { color: red } zoom: 2 }", ".foo {\n color: blue;\n zoom: 2;\n}\n.foo .bar {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".a, .b { .c, .d { color: red } }", ":is(.a, .b) :is(.c, .d) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".a, .b { .c, .d { color: red } }", ":is(.a, .b) .c,\n:is(.a, .b) .d {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".a, .b { .c, .d { color: red } }", ".a .c,\n.a .d,\n.b .c,\n.b .d {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".a, .b { & > & { color: red } }", ":is(.a, .b) > :is(.a, .b) {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".a, .b { & > & { color: red } }", ".a > .a,\n.a > .b,\n.b > .a,\n.b > .b {\n color: red;\n}\n", "")
@@ -1234,33 +1237,54 @@ func TestNestedSelector(t *testing.T) {
expectPrintedLowerUnsupported(t, everything, "&, a { .b { color: red } }", ":scope .b,\na .b {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, "&, a { .b { .c { color: red } } }", ":is(:scope, a) .b .c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "&, a { .b { .c { color: red } } }", ":scope .b .c,\na .b .c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, "a { > b, > c { color: red } }", "a > :is(b, c) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { > b, > c { color: red } }", "a > b,\na > c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "a { > b, > c { color: red } }", "a > b,\na > c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, "a { > b, + c { color: red } }", "a > b,\na + c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, "a { & > b, & > c { color: red } }", "a > :is(b, c) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { & > b, & > c { color: red } }", "a > b,\na > c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "a { & > b, & > c { color: red } }", "a > b,\na > c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, "a { & > b, & + c { color: red } }", "a > b,\na + c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, "a { > b&, > c& { color: red } }", "a > :is(a:is(b), a:is(c)) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { > b&, > c& { color: red } }", "a > a:is(b),\na > a:is(c) {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "a { > b&, > c& { color: red } }", "a > a:is(b),\na > a:is(c) {\n color: red;\n}\n", nestingWarningIs+nestingWarningIs)
expectPrintedLowerUnsupported(t, nesting, "a { > b&, + c& { color: red } }", "a > a:is(b),\na + a:is(c) {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, "a { > &.b, > &.c { color: red } }", "a > :is(a.b, a.c) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { > &.b, > &.c { color: red } }", "a > a.b,\na > a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "a { > &.b, > &.c { color: red } }", "a > a.b,\na > a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, "a { > &.b, + &.c { color: red } }", "a > a.b,\na + a.c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".a { > b&, > c& { color: red } }", ".a > :is(b.a, c.a) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".a { > b&, > c& { color: red } }", ".a > b.a,\n.a > c.a {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".a { > b&, > c& { color: red } }", ".a > b.a,\n.a > c.a {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".a { > b&, + c& { color: red } }", ".a > b.a,\n.a + c.a {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".a { > &.b, > &.c { color: red } }", ".a > :is(.a.b, .a.c) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".a { > &.b, > &.c { color: red } }", ".a > .a.b,\n.a > .a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".a { > &.b, > &.c { color: red } }", ".a > .a.b,\n.a > .a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".a { > &.b, + &.c { color: red } }", ".a > .a.b,\n.a + .a.c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, "~ .a { > &.b, > &.c { color: red } }", "~ .a > :is(.a.b, .a.c) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "~ .a { > &.b, > &.c { color: red } }", "~ .a > .a.b,\n~ .a > .a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, "~ .a { > &.b, > &.c { color: red } }", "~ .a > .a.b,\n~ .a > .a.c {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, "~ .a { > &.b, + &.c { color: red } }", "~ .a > .a.b,\n~ .a + .a.c {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".foo .bar { > &.a, > &.b { color: red } }", ".foo .bar > :is(.foo .bar.a, .foo .bar.b) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo .bar { > &.a, > &.b { color: red } }", ".foo .bar > :is(.foo .bar).a,\n.foo .bar > :is(.foo .bar).b {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, everything, ".foo .bar { > &.a, > &.b { color: red } }", ".foo .bar > :is(.foo .bar).a,\n.foo .bar > :is(.foo .bar).b {\n color: red;\n}\n", nestingWarningIs+nestingWarningIs)
expectPrintedLowerUnsupported(t, nesting, ".foo .bar { > &.a, + &.b { color: red } }", ".foo .bar > :is(.foo .bar).a,\n.foo .bar + :is(.foo .bar).b {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".demo { .lg { &.triangle, &.circle { color: red } } }", ".demo .lg:is(.triangle, .circle) {\n color: red;\n}\n", "")
- expectPrintedLowerUnsupported(t, nesting, ".demo { .lg { .triangle, .circle { color: red } } }", ".demo .lg :is(.triangle, .circle) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".demo { .lg { &.triangle, &.circle { color: red } } }", ".demo .lg.triangle,\n.demo .lg.circle {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".demo { .lg { .triangle, .circle { color: red } } }", ".demo .lg .triangle,\n.demo .lg .circle {\n color: red;\n}\n", "")
expectPrintedLowerUnsupported(t, nesting, ".card { .featured & & & { color: red } }", ".featured .card .card .card {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".a :has(> .c) { .b & { color: red } }", ".b :is(.a :has(> .c)) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { :has(&) { color: red } }", ":has(a) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { :has(> &) { color: red } }", ":has(> a) {\n color: red;\n}\n", "")
+
+ // Duplicate "&" may be used to increase specificity
+ expectPrintedLowerUnsupported(t, nesting, ".foo { &&&.bar { color: red } }", ".foo.foo.foo.bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo { &&& .bar { color: red } }", ".foo.foo.foo .bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo { .bar&&& { color: red } }", ".foo.foo.foo.bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo { .bar &&& { color: red } }", ".bar .foo.foo.foo {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo { &.bar&.baz& { color: red } }", ".foo.foo.foo.bar.baz {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { &&&.bar { color: red } }", "a:is(a):is(a).bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { &&& .bar { color: red } }", "a:is(a):is(a) .bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { .bar&&& { color: red } }", "a:is(a):is(a).bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { .bar &&& { color: red } }", ".bar a:is(a):is(a) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a { &.bar&.baz& { color: red } }", "a:is(a):is(a).bar.baz {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a, b { &&&.bar { color: red } }", ":is(a, b):is(a, b):is(a, b).bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a, b { &&& .bar { color: red } }", ":is(a, b):is(a, b):is(a, b) .bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a, b { .bar&&& { color: red } }", ":is(a, b):is(a, b):is(a, b).bar {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a, b { .bar &&& { color: red } }", ".bar :is(a, b):is(a, b):is(a, b) {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, "a, b { &.bar&.baz& { color: red } }", ":is(a, b):is(a, b):is(a, b).bar.baz {\n color: red;\n}\n", "")
+ expectPrintedLowerUnsupported(t, nesting, ".foo { &, &&.bar, &&& .baz { color: red } }", ".foo,\n.foo.foo.bar,\n.foo.foo.foo .baz {\n color: red;\n}\n", "")
// These are invalid SASS-style nested suffixes
expectPrintedLower(t, ".card { &--header { color: red } }", ".card {\n &--header {\n color: red;\n }\n}\n",
diff --git a/internal/css_printer/css_printer.go b/internal/css_printer/css_printer.go
index c010074c6a1..d2cf5ffa308 100644
--- a/internal/css_printer/css_printer.go
+++ b/internal/css_printer/css_printer.go
@@ -419,12 +419,12 @@ func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, ind
}
for j, compound := range complex.Selectors {
- p.printCompoundSelector(compound, j == 0, j+1 == len(complex.Selectors), indent)
+ p.printCompoundSelector(compound, j == 0, indent)
}
}
}
-func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool, isLast bool, indent int32) {
+func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool, indent int32) {
if !isFirst && sel.Combinator.Byte == 0 {
// A space is required in between compound selectors if there is no
// combinator in the middle. It's fine to convert "a + b" into "a+b"
@@ -459,9 +459,9 @@ func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bo
p.printNamespacedName(*sel.TypeSelector, whitespace)
}
- if sel.HasNestingSelector() {
+ for _, loc := range sel.NestingSelectorLocs {
if p.options.AddSourceMappings {
- p.builder.AddSourceMapping(logger.Loc{Start: int32(sel.NestingSelectorLoc.GetIndex())}, "", p.css)
+ p.builder.AddSourceMapping(loc, "", p.css)
}
p.print("&")
diff --git a/internal/helpers/path.go b/internal/helpers/path.go
index 87e90b8683f..2b05b16f5dd 100644
--- a/internal/helpers/path.go
+++ b/internal/helpers/path.go
@@ -1,6 +1,11 @@
package helpers
-import "strings"
+import (
+ "net/url"
+ "strings"
+
+ "github.com/evanw/esbuild/internal/fs"
+)
func IsInsideNodeModules(path string) bool {
for {
@@ -20,3 +25,37 @@ func IsInsideNodeModules(path string) bool {
path = dir
}
}
+
+func IsFileURL(fileURL *url.URL) bool {
+ return fileURL.Scheme == "file" && (fileURL.Host == "" || fileURL.Host == "localhost") && strings.HasPrefix(fileURL.Path, "/")
+}
+
+func FileURLFromFilePath(filePath string) *url.URL {
+ // Append a trailing slash so that resolving the URL includes the trailing
+ // directory, and turn Windows-style paths with volumes into URL-style paths:
+ //
+ // "/Users/User/Desktop" => "/Users/User/Desktop/"
+ // "C:\\Users\\User\\Desktop" => "/C:/Users/User/Desktop/"
+ //
+ filePath = strings.ReplaceAll(filePath, "\\", "/")
+ if !strings.HasPrefix(filePath, "/") {
+ filePath = "/" + filePath
+ }
+
+ return &url.URL{Scheme: "file", Path: filePath}
+}
+
+func FilePathFromFileURL(fs fs.FS, fileURL *url.URL) string {
+ path := fileURL.Path
+
+ // Convert URL-style paths back into Windows-style paths if needed:
+ //
+ // "/C:/Users/User/foo.js.map" => "C:\\Users\\User\\foo.js.map"
+ //
+ if !strings.HasPrefix(fs.Cwd(), "/") {
+ path = strings.TrimPrefix(path, "/")
+ path = strings.ReplaceAll(path, "/", "\\") // This is needed for "filepath.Rel()" to work
+ }
+
+ return path
+}
diff --git a/internal/js_ast/js_ast.go b/internal/js_ast/js_ast.go
index f44f1f83b2f..f8d3fe32f5b 100644
--- a/internal/js_ast/js_ast.go
+++ b/internal/js_ast/js_ast.go
@@ -374,6 +374,15 @@ type Class struct {
BodyLoc logger.Loc
CloseBraceLoc logger.Loc
+ // If true, JavaScript decorators (i.e. not TypeScript experimental
+ // decorators) should be lowered. This is the case either if JavaScript
+ // decorators are not supported in the configured target environment, or
+ // if "useDefineForClassFields" is set to false and this class has
+ // decorators on it. Note that this flag is not necessarily set to true if
+ // "useDefineForClassFields" is false and a class has an "accessor" even
+ // though the accessor feature comes from the decorator specification.
+ ShouldLowerStandardDecorators bool
+
// If true, property field initializers cannot be assumed to have no side
// effects. For example:
//
diff --git a/internal/js_ast/js_ast_helpers.go b/internal/js_ast/js_ast_helpers.go
index da78ea76919..bd8bbc3031f 100644
--- a/internal/js_ast/js_ast_helpers.go
+++ b/internal/js_ast/js_ast_helpers.go
@@ -946,14 +946,12 @@ func ToUint32(f float64) uint32 {
return uint32(ToInt32(f))
}
+// If this returns true, we know the result can't be NaN
func isInt32OrUint32(data E) bool {
switch e := data.(type) {
- case *EUnary:
- return e.Op == UnOpCpl
-
case *EBinary:
switch e.Op {
- case BinOpBitwiseAnd, BinOpBitwiseOr, BinOpBitwiseXor, BinOpShl, BinOpShr, BinOpUShr:
+ case BinOpUShr: // This is the only bitwise operator that can't return a bigint (because it throws instead)
return true
case BinOpLogicalOr, BinOpLogicalAnd:
@@ -1496,6 +1494,12 @@ func CheckEqualityIfNoSideEffects(left E, right E, kind EqualityKind) (equal boo
case *ENull, *EUndefined:
// "(not null or undefined) == undefined" is false
return false, true
+
+ default:
+ if kind == StrictEquality && IsPrimitiveLiteral(right) {
+ // "boolean === (not boolean)" is false
+ return false, true
+ }
}
case *ENumber:
@@ -1523,6 +1527,12 @@ func CheckEqualityIfNoSideEffects(left E, right E, kind EqualityKind) (equal boo
case *ENull, *EUndefined:
// "(not null or undefined) == undefined" is false
return false, true
+
+ default:
+ if kind == StrictEquality && IsPrimitiveLiteral(right) {
+ // "number === (not number)" is false
+ return false, true
+ }
}
case *EBigInt:
@@ -1535,6 +1545,12 @@ func CheckEqualityIfNoSideEffects(left E, right E, kind EqualityKind) (equal boo
case *ENull, *EUndefined:
// "(not null or undefined) == undefined" is false
return false, true
+
+ default:
+ if kind == StrictEquality && IsPrimitiveLiteral(right) {
+ // "bigint === (not bigint)" is false
+ return false, true
+ }
}
case *EString:
@@ -1547,6 +1563,12 @@ func CheckEqualityIfNoSideEffects(left E, right E, kind EqualityKind) (equal boo
case *ENull, *EUndefined:
// "(not null or undefined) == undefined" is false
return false, true
+
+ default:
+ if kind == StrictEquality && IsPrimitiveLiteral(right) {
+ // "string === (not string)" is false
+ return false, true
+ }
}
}
@@ -2081,10 +2103,10 @@ func (ctx HelperContext) SimplifyBooleanExpr(expr Expr) Expr {
// in a boolean context is unnecessary because the value is
// only truthy if it's not zero.
if e.Op == BinOpStrictNe || e.Op == BinOpLooseNe {
- // "if ((a | b) !== 0)" => "if (a | b)"
+ // "if ((a >>> b) !== 0)" => "if (a >>> b)"
return left
} else {
- // "if ((a | b) === 0)" => "if (!(a | b))"
+ // "if ((a >>> b) === 0)" => "if (!(a >>> b))"
return Not(left)
}
}
diff --git a/internal/js_parser/global_name_parser.go b/internal/js_parser/global_name_parser.go
index 0064990b75a..78649ead8aa 100644
--- a/internal/js_parser/global_name_parser.go
+++ b/internal/js_parser/global_name_parser.go
@@ -19,9 +19,22 @@ func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok
lexer := js_lexer.NewLexerGlobalName(log, source)
- // Start off with an identifier
+ // Start off with an identifier or a keyword that results in an object
result = append(result, lexer.Identifier.String)
- lexer.Expect(js_lexer.TIdentifier)
+ switch lexer.Token {
+ case js_lexer.TThis:
+ lexer.Next()
+
+ case js_lexer.TImport:
+ // Handle "import.meta"
+ lexer.Next()
+ lexer.Expect(js_lexer.TDot)
+ result = append(result, lexer.Identifier.String)
+ lexer.ExpectContextualKeyword("meta")
+
+ default:
+ lexer.Expect(js_lexer.TIdentifier)
+ }
// Follow with dot or index expressions
for lexer.Token != js_lexer.TEndOfFile {
diff --git a/internal/js_parser/js_parser.go b/internal/js_parser/js_parser.go
index 8da1a703ce4..cabcd2ec2d6 100644
--- a/internal/js_parser/js_parser.go
+++ b/internal/js_parser/js_parser.go
@@ -3,6 +3,7 @@ package js_parser
import (
"fmt"
"math"
+ "math/big"
"regexp"
"sort"
"strings"
@@ -227,6 +228,7 @@ type parser struct {
importMetaRef ast.Ref
promiseRef ast.Ref
regExpRef ast.Ref
+ bigIntRef ast.Ref
superCtorRef ast.Ref
// Imports from "react/jsx-runtime" and "react", respectively.
@@ -385,6 +387,7 @@ type parser struct {
latestReturnHadSemicolon bool
messageAboutThisIsUndefined bool
isControlFlowDead bool
+ shouldAddKeyComment bool
// If this is true, then all top-level statements are wrapped in a try/catch
willWrapModuleInTryCatchForUsing bool
@@ -744,6 +747,89 @@ type fnOnlyDataVisit struct {
silenceMessageAboutThisBeingUndefined bool
}
+type livenessStatus int8
+
+const (
+ alwaysDead livenessStatus = -1
+ livenessUnknown livenessStatus = 0
+ alwaysLive livenessStatus = 1
+)
+
+type switchCaseLiveness struct {
+ status livenessStatus
+ canFallThrough bool
+}
+
+func analyzeSwitchCasesForLiveness(s *js_ast.SSwitch) []switchCaseLiveness {
+ cases := make([]switchCaseLiveness, 0, len(s.Cases))
+ defaultIndex := -1
+
+ // Determine the status of the individual cases independently
+ maxStatus := alwaysDead
+ for i, c := range s.Cases {
+ if c.ValueOrNil.Data == nil {
+ defaultIndex = i
+ }
+
+ // Check the value for strict equality
+ var status livenessStatus
+ if maxStatus == alwaysLive {
+ status = alwaysDead // Everything after an always-live case is always dead
+ } else if c.ValueOrNil.Data == nil {
+ status = alwaysDead // This is the default case, and will be filled in later
+ } else if isEqualToTest, ok := js_ast.CheckEqualityIfNoSideEffects(s.Test.Data, c.ValueOrNil.Data, js_ast.StrictEquality); ok {
+ if isEqualToTest {
+ status = alwaysLive // This branch will always be matched, and will be taken unless an earlier branch was taken
+ } else {
+ status = alwaysDead // This branch will never be matched, and will not be taken unless there was fall-through
+ }
+ } else {
+ status = livenessUnknown // This branch depends on run-time values and may or may not be matched
+ }
+ if maxStatus < status {
+ maxStatus = status
+ }
+
+ // Check for potential fall-through by checking for a jump at the end of the body
+ canFallThrough := true
+ stmts := c.Body
+ for len(stmts) > 0 {
+ switch s := stmts[len(stmts)-1].Data.(type) {
+ case *js_ast.SBlock:
+ stmts = s.Stmts // If this ends with a block, check the block's body next
+ continue
+ case *js_ast.SBreak, *js_ast.SContinue, *js_ast.SReturn, *js_ast.SThrow:
+ canFallThrough = false
+ }
+ break
+ }
+
+ cases = append(cases, switchCaseLiveness{
+ status: status,
+ canFallThrough: canFallThrough,
+ })
+ }
+
+ // Set the liveness for the default case last based on the other cases
+ if defaultIndex != -1 {
+ // The negation here transposes "always live" with "always dead"
+ cases[defaultIndex].status = -maxStatus
+ }
+
+ // Then propagate fall-through information in linear fall-through order
+ for i, c := range cases {
+ // Propagate state forward if this isn't dead. Note that the "can fall
+ // through" flag does not imply "must fall through". The body may have
+ // an embedded "break" inside an if statement, for example.
+ if c.status != alwaysDead {
+ for j := i + 1; j < len(cases) && cases[j-1].canFallThrough; j++ {
+ cases[j].status = livenessUnknown
+ }
+ }
+ }
+ return cases
+}
+
const bloomFilterSize = 251
type duplicateCaseValue struct {
@@ -1765,6 +1851,14 @@ func (p *parser) makeRegExpRef() ast.Ref {
return p.regExpRef
}
+func (p *parser) makeBigIntRef() ast.Ref {
+ if p.bigIntRef == ast.InvalidRef {
+ p.bigIntRef = p.newSymbol(ast.SymbolUnbound, "BigInt")
+ p.moduleScope.Generated = append(p.moduleScope.Generated, p.bigIntRef)
+ }
+ return p.bigIntRef
+}
+
// The name is temporarily stored in the ref until the scope traversal pass
// happens, at which point a symbol will be generated and the ref will point
// to the symbol instead.
@@ -2026,6 +2120,15 @@ func (p *parser) parseStringLiteral() js_ast.Expr {
return value
}
+func (p *parser) parseBigIntOrStringIfUnsupported() js_ast.Expr {
+ if p.options.unsupportedJSFeatures.Has(compat.Bigint) {
+ var i big.Int
+ fmt.Sscan(p.lexer.Identifier.String, &i)
+ return js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EString{Value: helpers.StringToUTF16(i.String())}}
+ }
+ return js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EBigInt{Value: p.lexer.Identifier.String}}
+}
+
type propertyOpts struct {
decorators []js_ast.Decorator
decoratorScope *js_ast.Scope
@@ -2064,8 +2167,7 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op
}
case js_lexer.TBigIntegerLiteral:
- key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EBigInt{Value: p.lexer.Identifier.String}}
- p.markSyntaxFeature(compat.Bigint, p.lexer.Range())
+ key = p.parseBigIntOrStringIfUnsupported()
p.lexer.Next()
case js_lexer.TPrivateIdentifier:
@@ -2136,47 +2238,50 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op
couldBeModifierKeyword := p.lexer.IsIdentifierOrKeyword()
if !couldBeModifierKeyword {
switch p.lexer.Token {
- case js_lexer.TOpenBracket, js_lexer.TNumericLiteral, js_lexer.TStringLiteral,
- js_lexer.TAsterisk, js_lexer.TPrivateIdentifier:
+ case js_lexer.TOpenBracket, js_lexer.TNumericLiteral, js_lexer.TStringLiteral, js_lexer.TPrivateIdentifier:
couldBeModifierKeyword = true
+ case js_lexer.TAsterisk:
+ if opts.isAsync || (raw != "get" && raw != "set") {
+ couldBeModifierKeyword = true
+ }
}
}
// If so, check for a modifier keyword
if couldBeModifierKeyword {
- switch name.String {
+ switch raw {
case "get":
- if !opts.isAsync && raw == name.String {
+ if !opts.isAsync {
p.markSyntaxFeature(compat.ObjectAccessors, nameRange)
return p.parseProperty(startLoc, js_ast.PropertyGetter, opts, nil)
}
case "set":
- if !opts.isAsync && raw == name.String {
+ if !opts.isAsync {
p.markSyntaxFeature(compat.ObjectAccessors, nameRange)
return p.parseProperty(startLoc, js_ast.PropertySetter, opts, nil)
}
case "accessor":
- if !p.lexer.HasNewlineBefore && !opts.isAsync && opts.isClass && raw == name.String {
+ if !p.lexer.HasNewlineBefore && !opts.isAsync && opts.isClass {
return p.parseProperty(startLoc, js_ast.PropertyAutoAccessor, opts, nil)
}
case "async":
- if !p.lexer.HasNewlineBefore && !opts.isAsync && raw == name.String {
+ if !p.lexer.HasNewlineBefore && !opts.isAsync {
opts.isAsync = true
opts.asyncRange = nameRange
return p.parseProperty(startLoc, js_ast.PropertyMethod, opts, nil)
}
case "static":
- if !opts.isStatic && !opts.isAsync && opts.isClass && raw == name.String {
+ if !opts.isStatic && !opts.isAsync && opts.isClass {
opts.isStatic = true
return p.parseProperty(startLoc, kind, opts, nil)
}
case "declare":
- if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && opts.tsDeclareRange.Len == 0 && raw == name.String {
+ if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && opts.tsDeclareRange.Len == 0 {
opts.tsDeclareRange = nameRange
scopeIndex := len(p.scopesInOrder)
@@ -2213,7 +2318,7 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op
}
case "abstract":
- if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && !opts.isTSAbstract && raw == name.String {
+ if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && !opts.isTSAbstract {
opts.isTSAbstract = true
scopeIndex := len(p.scopesInOrder)
@@ -2249,7 +2354,7 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op
case "private", "protected", "public", "readonly", "override":
// Skip over TypeScript keywords
- if opts.isClass && p.options.ts.Parse && raw == name.String {
+ if opts.isClass && p.options.ts.Parse {
return p.parseProperty(startLoc, kind, opts, nil)
}
}
@@ -2284,7 +2389,10 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op
}
if p.isMangledProp(name.String) {
- key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.ENameOfSymbol{Ref: p.storeNameInRef(name)}}
+ key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.ENameOfSymbol{
+ Ref: p.storeNameInRef(name),
+ HasPropertyKeyComment: true,
+ }}
} else {
key = js_ast.Expr{Loc: nameRange.Loc, Data: &js_ast.EString{Value: helpers.StringToUTF16(name.String)}}
}
@@ -2624,8 +2732,7 @@ func (p *parser) parsePropertyBinding() js_ast.PropertyBinding {
preferQuotedKey = !p.options.minifySyntax
case js_lexer.TBigIntegerLiteral:
- key = js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EBigInt{Value: p.lexer.Identifier.String}}
- p.markSyntaxFeature(compat.Bigint, p.lexer.Range())
+ key = p.parseBigIntOrStringIfUnsupported()
p.lexer.Next()
case js_lexer.TOpenBracket:
@@ -2876,9 +2983,10 @@ func (p *parser) parseAsyncPrefixExpr(asyncRange logger.Range, level js_ast.L, f
// "async x => {}"
case js_lexer.TIdentifier:
if level <= js_ast.LAssign {
- // See https://github.com/tc39/ecma262/issues/2034 for details
isArrowFn := true
if (flags&exprFlagForLoopInit) != 0 && p.lexer.Identifier.String == "of" {
+ // See https://github.com/tc39/ecma262/issues/2034 for details
+
// "for (async of" is only an arrow function if the next token is "=>"
isArrowFn = p.checkForArrowAfterTheCurrentToken()
@@ -2888,6 +2996,18 @@ func (p *parser) parseAsyncPrefixExpr(asyncRange logger.Range, level js_ast.L, f
p.log.AddError(&p.tracker, r, "For loop initializers cannot start with \"async of\"")
panic(js_lexer.LexerPanic{})
}
+ } else if p.options.ts.Parse && p.lexer.Token == js_lexer.TIdentifier {
+ // Make sure we can parse the following TypeScript code:
+ //
+ // export function open(async?: boolean): void {
+ // console.log(async as boolean)
+ // }
+ //
+ // TypeScript solves this by using a two-token lookahead to check for
+ // "=>" after an identifier after the "async". This is done in
+ // "isUnParenthesizedAsyncArrowFunctionWorker" which was introduced
+ // here: https://github.com/microsoft/TypeScript/pull/8444
+ isArrowFn = p.checkForArrowAfterTheCurrentToken()
}
if isArrowFn {
@@ -3528,7 +3648,6 @@ func (p *parser) parsePrefix(level js_ast.L, errors *deferredErrors, flags exprF
case js_lexer.TBigIntegerLiteral:
value := p.lexer.Identifier
- p.markSyntaxFeature(compat.Bigint, p.lexer.Range())
p.lexer.Next()
return js_ast.Expr{Loc: loc, Data: &js_ast.EBigInt{Value: value.String}}
@@ -6287,6 +6406,7 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp
bodyLoc := p.lexer.Loc()
p.lexer.Expect(js_lexer.TOpenBrace)
properties := []js_ast.Property{}
+ hasPropertyDecorator := false
// Allow "in" and private fields inside class bodies
oldAllowIn := p.allowIn
@@ -6316,6 +6436,9 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp
firstDecoratorLoc := p.lexer.Loc()
scopeIndex := len(p.scopesInOrder)
opts.decorators = p.parseDecorators(p.currentScope, classKeyword, opts.decoratorContext)
+ if len(opts.decorators) > 0 {
+ hasPropertyDecorator = true
+ }
// This property may turn out to be a type in TypeScript, which should be ignored
if property, ok := p.parseProperty(p.saveExprCommentsHere(), js_ast.PropertyField, opts, nil); ok {
@@ -6353,6 +6476,33 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp
closeBraceLoc := p.saveExprCommentsHere()
p.lexer.Expect(js_lexer.TCloseBrace)
+
+ // TypeScript has legacy behavior that uses assignment semantics instead of
+ // define semantics for class fields when "useDefineForClassFields" is enabled
+ // (in which case TypeScript behaves differently than JavaScript, which is
+ // arguably "wrong").
+ //
+ // This legacy behavior exists because TypeScript added class fields to
+ // TypeScript before they were added to JavaScript. They decided to go with
+ // assignment semantics for whatever reason. Later on TC39 decided to go with
+ // define semantics for class fields instead. This behaves differently if the
+ // base class has a setter with the same name.
+ //
+ // The value of "useDefineForClassFields" defaults to false when it's not
+ // specified and the target is earlier than "ES2022" since the class field
+ // language feature was added in ES2022. However, TypeScript's "target"
+ // setting currently defaults to "ES3" which unfortunately means that the
+ // "useDefineForClassFields" setting defaults to false (i.e. to "wrong").
+ //
+ // We default "useDefineForClassFields" to true (i.e. to "correct") instead.
+ // This is partially because our target defaults to "esnext", and partially
+ // because this is a legacy behavior that no one should be using anymore.
+ // Users that want the wrong behavior can either set "useDefineForClassFields"
+ // to false in "tsconfig.json" explicitly, or set TypeScript's "target" to
+	// "ES2021" or earlier in their "tsconfig.json" file.
+ useDefineForClassFields := !p.options.ts.Parse || p.options.ts.Config.UseDefineForClassFields == config.True ||
+ (p.options.ts.Config.UseDefineForClassFields == config.Unspecified && p.options.ts.Config.Target != config.TSTargetBelowES2022)
+
return js_ast.Class{
ClassKeyword: classKeyword,
Decorators: classOpts.decorators,
@@ -6362,31 +6512,16 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp
Properties: properties,
CloseBraceLoc: closeBraceLoc,
- // TypeScript has legacy behavior that uses assignment semantics instead of
- // define semantics for class fields when "useDefineForClassFields" is enabled
- // (in which case TypeScript behaves differently than JavaScript, which is
- // arguably "wrong").
- //
- // This legacy behavior exists because TypeScript added class fields to
- // TypeScript before they were added to JavaScript. They decided to go with
- // assignment semantics for whatever reason. Later on TC39 decided to go with
- // define semantics for class fields instead. This behaves differently if the
- // base class has a setter with the same name.
- //
- // The value of "useDefineForClassFields" defaults to false when it's not
- // specified and the target is earlier than "ES2022" since the class field
- // language feature was added in ES2022. However, TypeScript's "target"
- // setting currently defaults to "ES3" which unfortunately means that the
- // "useDefineForClassFields" setting defaults to false (i.e. to "wrong").
- //
- // We default "useDefineForClassFields" to true (i.e. to "correct") instead.
- // This is partially because our target defaults to "esnext", and partially
- // because this is a legacy behavior that no one should be using anymore.
- // Users that want the wrong behavior can either set "useDefineForClassFields"
- // to false in "tsconfig.json" explicitly, or set TypeScript's "target" to
- // "ES2021" or earlier in their in "tsconfig.json" file.
- UseDefineForClassFields: !p.options.ts.Parse || p.options.ts.Config.UseDefineForClassFields == config.True ||
- (p.options.ts.Config.UseDefineForClassFields == config.Unspecified && p.options.ts.Config.Target != config.TSTargetBelowES2022),
+ // Always lower standard decorators if they are present and TypeScript's
+ // "useDefineForClassFields" setting is false even if the configured target
+ // environment supports decorators. This setting changes the behavior of
+ // class fields, and so we must lower decorators so they behave correctly.
+ ShouldLowerStandardDecorators: (len(classOpts.decorators) > 0 || hasPropertyDecorator) &&
+ ((!p.options.ts.Parse && p.options.unsupportedJSFeatures.Has(compat.Decorators)) ||
+ (p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators != config.True &&
+ (p.options.unsupportedJSFeatures.Has(compat.Decorators) || !useDefineForClassFields))),
+
+ UseDefineForClassFields: useDefineForClassFields,
}
}
@@ -10302,7 +10437,7 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
}
// Handle "for await" that has been lowered by moving this label inside the "try"
- if try, ok := s.Stmt.Data.(*js_ast.STry); ok && len(try.Block.Stmts) > 0 {
+ if try, ok := s.Stmt.Data.(*js_ast.STry); ok && len(try.Block.Stmts) == 1 {
if _, ok := try.Block.Stmts[0].Data.(*js_ast.SFor); ok {
try.Block.Stmts[0] = js_ast.Stmt{Loc: stmt.Loc, Data: &js_ast.SLabel{
Stmt: try.Block.Stmts[0],
@@ -10755,6 +10890,13 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
p.popScope()
if s.Catch != nil {
+ old := p.isControlFlowDead
+
+ // If the try body is empty, then the catch body is dead
+ if len(s.Block.Stmts) == 0 {
+ p.isControlFlowDead = true
+ }
+
p.pushScopeForVisitPass(js_ast.ScopeCatchBinding, s.Catch.Loc)
if s.Catch.BindingOrNil.Data != nil {
p.visitBinding(s.Catch.BindingOrNil, bindingOpts{})
@@ -10766,6 +10908,8 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
p.lowerObjectRestInCatchBinding(s.Catch)
p.popScope()
+
+ p.isControlFlowDead = old
}
if s.Finally != nil {
@@ -10774,24 +10918,59 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
p.popScope()
}
+ // Drop the whole thing if the try body is empty
+ if p.options.minifySyntax && len(s.Block.Stmts) == 0 {
+ keepCatch := false
+
+ // Certain "catch" blocks need to be preserved:
+ //
+ // try {} catch { let foo } // Can be removed
+ // try {} catch { var foo } // Must be kept
+ //
+ if s.Catch != nil {
+ for _, stmt2 := range s.Catch.Block.Stmts {
+ if shouldKeepStmtInDeadControlFlow(stmt2) {
+ keepCatch = true
+ break
+ }
+ }
+ }
+
+ // Make sure to preserve the "finally" block if present
+ if !keepCatch {
+ if s.Finally == nil {
+ return stmts
+ }
+ finallyNeedsBlock := false
+ for _, stmt2 := range s.Finally.Block.Stmts {
+ if statementCaresAboutScope(stmt2) {
+ finallyNeedsBlock = true
+ break
+ }
+ }
+ if !finallyNeedsBlock {
+ return append(stmts, s.Finally.Block.Stmts...)
+ }
+ block := s.Finally.Block
+ stmt = js_ast.Stmt{Loc: s.Finally.Loc, Data: &block}
+ }
+ }
+
case *js_ast.SSwitch:
s.Test = p.visitExpr(s.Test)
p.pushScopeForVisitPass(js_ast.ScopeBlock, s.BodyLoc)
oldIsInsideSwitch := p.fnOrArrowDataVisit.isInsideSwitch
p.fnOrArrowDataVisit.isInsideSwitch = true
- for i, c := range s.Cases {
+
+ // Visit case values first
+ for i := range s.Cases {
+ c := &s.Cases[i]
if c.ValueOrNil.Data != nil {
c.ValueOrNil = p.visitExpr(c.ValueOrNil)
p.warnAboutEqualityCheck("case", c.ValueOrNil, c.ValueOrNil.Loc)
p.warnAboutTypeofAndString(s.Test, c.ValueOrNil, onlyCheckOriginalOrder)
}
- c.Body = p.visitStmts(c.Body, stmtsSwitch)
-
- // Make sure the assignment to the body above is preserved
- s.Cases[i] = c
}
- p.fnOrArrowDataVisit.isInsideSwitch = oldIsInsideSwitch
- p.popScope()
// Check for duplicate case values
p.duplicateCaseChecker.reset()
@@ -10801,6 +10980,40 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
}
}
+ // Then analyze the cases to determine which ones are live and/or dead
+ cases := analyzeSwitchCasesForLiveness(s)
+
+ // Then visit case bodies, and potentially filter out dead cases
+ end := 0
+ for i, c := range s.Cases {
+ isAlwaysDead := cases[i].status == alwaysDead
+
+ // Potentially treat the case body as dead code
+ old := p.isControlFlowDead
+ if isAlwaysDead {
+ p.isControlFlowDead = true
+ }
+ c.Body = p.visitStmts(c.Body, stmtsSwitch)
+ p.isControlFlowDead = old
+
+ // Filter out this case when minifying if it's known to be dead. Visiting
+ // the body above should already have removed any statements that can be
+ // removed safely, so if the body isn't empty then that means it contains
+ // some statements that can't be removed safely (e.g. a hoisted "var").
+ // So don't remove this case if the body isn't empty.
+ if p.options.minifySyntax && isAlwaysDead && len(c.Body) == 0 {
+ continue
+ }
+
+ // Make sure the assignment to the body above is preserved
+ s.Cases[end] = c
+ end++
+ }
+ s.Cases = s.Cases[:end]
+
+ p.fnOrArrowDataVisit.isInsideSwitch = oldIsInsideSwitch
+ p.popScope()
+
// Unwrap switch statements in dead code
if p.options.minifySyntax && p.isControlFlowDead {
for _, c := range s.Cases {
@@ -10814,6 +11027,36 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
return append(stmts, lowered...)
}
+ // Attempt to remove statically-determined switch statements
+ if p.options.minifySyntax {
+ if len(s.Cases) == 0 {
+ if p.astHelpers.ExprCanBeRemovedIfUnused(s.Test) {
+ // Remove everything
+ return stmts
+ } else {
+ // Just keep the test expression
+ return append(stmts, js_ast.Stmt{Loc: s.Test.Loc, Data: &js_ast.SExpr{Value: s.Test}})
+ }
+ } else if len(s.Cases) == 1 {
+ c := s.Cases[0]
+ var isTaken bool
+ var ok bool
+ if c.ValueOrNil.Data != nil {
+ // Non-default case
+ isTaken, ok = js_ast.CheckEqualityIfNoSideEffects(s.Test.Data, c.ValueOrNil.Data, js_ast.StrictEquality)
+ } else {
+ // Default case
+ isTaken, ok = true, p.astHelpers.ExprCanBeRemovedIfUnused(s.Test)
+ }
+ if ok && isTaken {
+ if body, ok := tryToInlineCaseBody(s.BodyLoc, c.Body, s.CloseBraceLoc); ok {
+ // Inline the case body
+ return append(stmts, body...)
+ }
+ }
+ }
+ }
+
case *js_ast.SFunction:
p.visitFn(&s.Fn, s.Fn.OpenParenLoc, visitFnOpts{})
@@ -11107,6 +11350,51 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_
return stmts
}
+func tryToInlineCaseBody(openBraceLoc logger.Loc, stmts []js_ast.Stmt, closeBraceLoc logger.Loc) ([]js_ast.Stmt, bool) {
+ if len(stmts) == 1 {
+ if block, ok := stmts[0].Data.(*js_ast.SBlock); ok {
+ return tryToInlineCaseBody(stmts[0].Loc, block.Stmts, block.CloseBraceLoc)
+ }
+ }
+
+ caresAboutScope := false
+
+loop:
+ for i, stmt := range stmts {
+ switch s := stmt.Data.(type) {
+ case *js_ast.SEmpty, *js_ast.SDirective, *js_ast.SComment, *js_ast.SExpr,
+ *js_ast.SDebugger, *js_ast.SContinue, *js_ast.SReturn, *js_ast.SThrow:
+ // These can all be inlined outside of the switch without problems
+ continue
+
+ case *js_ast.SLocal:
+ if s.Kind != js_ast.LocalVar {
+ caresAboutScope = true
+ }
+
+ case *js_ast.SBreak:
+ if s.Label != nil {
+ // The break label could target this switch, but we don't know whether that's the case or not here
+ return nil, false
+ }
+
+ // An unlabeled "break" inside a switch breaks out of the case
+ stmts = stmts[:i]
+ break loop
+
+ default:
+ // Assume anything else can't be inlined
+ return nil, false
+ }
+ }
+
+ // If we still need a scope, wrap the result in a block
+ if caresAboutScope {
+ return []js_ast.Stmt{{Loc: openBraceLoc, Data: &js_ast.SBlock{Stmts: stmts, CloseBraceLoc: closeBraceLoc}}}, true
+ }
+ return stmts, true
+}
+
func isUnsightlyPrimitive(data js_ast.E) bool {
switch data.(type) {
case *js_ast.EBoolean, *js_ast.ENull, *js_ast.EUndefined, *js_ast.ENumber, *js_ast.EBigInt, *js_ast.EString:
@@ -11956,8 +12244,8 @@ func (p *parser) instantiateDefineExpr(loc logger.Loc, expr config.DefineExpr, o
// Substitute user-specified defines
if defines, ok := p.options.defines.DotDefines[parts[len(parts)-1]]; ok {
for _, define := range defines {
- if define.Data.DefineExpr != nil && helpers.StringArraysEqual(define.Parts, parts) {
- return p.instantiateDefineExpr(loc, *define.Data.DefineExpr, opts)
+ if define.DefineExpr != nil && helpers.StringArraysEqual(define.KeyParts, parts) {
+ return p.instantiateDefineExpr(loc, *define.DefineExpr, opts)
}
}
}
@@ -12523,6 +12811,7 @@ type exprOut struct {
// If true and this is used as a call target, the whole call expression
// must be replaced with undefined.
+ callMustBeReplacedWithUndefined bool
methodCallMustBeReplacedWithUndefined bool
}
@@ -12812,7 +13101,18 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
// it doesn't affect these mitigations by ensuring that the mitigations are not
// applied in those cases (e.g. by adding an additional conditional check).
switch e := expr.Data.(type) {
- case *js_ast.ENull, *js_ast.ESuper, *js_ast.EBoolean, *js_ast.EBigInt, *js_ast.EUndefined, *js_ast.EJSXText:
+ case *js_ast.ENull, *js_ast.ESuper, *js_ast.EBoolean, *js_ast.EUndefined, *js_ast.EJSXText:
+
+ case *js_ast.EBigInt:
+ if p.options.unsupportedJSFeatures.Has(compat.Bigint) {
+ // For ease of implementation, the actual reference of the "BigInt"
+ // symbol is deferred to print time. That means we don't have to
+ // special-case the "BigInt" constructor in side-effect computations
+ // and future big integer constant folding (of which there isn't any
+ // at the moment).
+ p.markSyntaxFeature(compat.Bigint, p.source.RangeOfNumber(expr.Loc))
+ p.recordUsage(p.makeBigIntRef())
+ }
case *js_ast.ENameOfSymbol:
e.Ref = p.symbolForMangledProp(p.loadNameFromRef(e.Ref))
@@ -12857,7 +13157,8 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
if in.shouldMangleStringsAsProps && p.options.mangleQuoted && !e.PreferTemplate {
if name := helpers.UTF16ToString(e.Value); p.isMangledProp(name) {
return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ENameOfSymbol{
- Ref: p.symbolForMangledProp(name),
+ Ref: p.symbolForMangledProp(name),
+ HasPropertyKeyComment: e.HasPropertyKeyComment,
}}, exprOut{}
}
}
@@ -12890,10 +13191,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
// Check both user-specified defines and known globals
if defines, ok := p.options.defines.DotDefines["meta"]; ok {
for _, define := range defines {
- if p.isDotOrIndexDefineMatch(expr, define.Parts) {
+ if p.isDotOrIndexDefineMatch(expr, define.KeyParts) {
// Substitute user-specified defines
- if define.Data.DefineExpr != nil {
- return p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{
+ if define.DefineExpr != nil {
+ return p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{
assignTarget: in.assignTarget,
isCallTarget: isCallTarget,
isDeleteTarget: isDeleteTarget,
@@ -13521,10 +13822,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
// Check both user-specified defines and known globals
if defines, ok := p.options.defines.DotDefines[e.Name]; ok {
for _, define := range defines {
- if p.isDotOrIndexDefineMatch(expr, define.Parts) {
+ if p.isDotOrIndexDefineMatch(expr, define.KeyParts) {
// Substitute user-specified defines
- if define.Data.DefineExpr != nil {
- new := p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{
+ if define.DefineExpr != nil {
+ new := p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{
assignTarget: in.assignTarget,
isCallTarget: isCallTarget,
isDeleteTarget: isDeleteTarget,
@@ -13540,13 +13841,13 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}
// Copy the side effect flags over in case this expression is unused
- if define.Data.Flags.Has(config.CanBeRemovedIfUnused) {
+ if define.Flags.Has(config.CanBeRemovedIfUnused) {
e.CanBeRemovedIfUnused = true
}
- if define.Data.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations {
+ if define.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations {
e.CallCanBeUnwrappedIfUnused = true
}
- if define.Data.Flags.Has(config.IsSymbolInstance) {
+ if define.Flags.Has(config.IsSymbolInstance) {
e.IsSymbolInstance = true
}
break
@@ -13615,12 +13916,23 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
return p.lowerOptionalChain(expr, in, out)
}
+ // Also erase "console.log.call(console, 123)" and "console.log.bind(console)"
+ if out.callMustBeReplacedWithUndefined {
+ if e.Name == "call" || e.Name == "apply" {
+ out.methodCallMustBeReplacedWithUndefined = true
+ } else if p.options.unsupportedJSFeatures.Has(compat.Arrow) {
+ e.Target.Data = &js_ast.EFunction{}
+ } else {
+ e.Target.Data = &js_ast.EArrow{}
+ }
+ }
+
// Potentially rewrite this property access
out = exprOut{
- childContainsOptionalChain: containsOptionalChain,
- methodCallMustBeReplacedWithUndefined: out.methodCallMustBeReplacedWithUndefined,
- thisArgFunc: out.thisArgFunc,
- thisArgWrapFunc: out.thisArgWrapFunc,
+ childContainsOptionalChain: containsOptionalChain,
+ callMustBeReplacedWithUndefined: out.methodCallMustBeReplacedWithUndefined,
+ thisArgFunc: out.thisArgFunc,
+ thisArgWrapFunc: out.thisArgWrapFunc,
}
if !in.hasChainParent {
out.thisArgFunc = nil
@@ -13643,10 +13955,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
if str, ok := e.Index.Data.(*js_ast.EString); ok {
if defines, ok := p.options.defines.DotDefines[helpers.UTF16ToString(str.Value)]; ok {
for _, define := range defines {
- if p.isDotOrIndexDefineMatch(expr, define.Parts) {
+ if p.isDotOrIndexDefineMatch(expr, define.KeyParts) {
// Substitute user-specified defines
- if define.Data.DefineExpr != nil {
- new := p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{
+ if define.DefineExpr != nil {
+ new := p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{
assignTarget: in.assignTarget,
isCallTarget: isCallTarget,
isDeleteTarget: isDeleteTarget,
@@ -13666,13 +13978,13 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}
// Copy the side effect flags over in case this expression is unused
- if define.Data.Flags.Has(config.CanBeRemovedIfUnused) {
+ if define.Flags.Has(config.CanBeRemovedIfUnused) {
e.CanBeRemovedIfUnused = true
}
- if define.Data.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations {
+ if define.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations {
e.CallCanBeUnwrappedIfUnused = true
}
- if define.Data.Flags.Has(config.IsSymbolInstance) {
+ if define.Flags.Has(config.IsSymbolInstance) {
e.IsSymbolInstance = true
}
break
@@ -13779,10 +14091,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
// Potentially rewrite this property access
out = exprOut{
- childContainsOptionalChain: containsOptionalChain,
- methodCallMustBeReplacedWithUndefined: out.methodCallMustBeReplacedWithUndefined,
- thisArgFunc: out.thisArgFunc,
- thisArgWrapFunc: out.thisArgWrapFunc,
+ childContainsOptionalChain: containsOptionalChain,
+ callMustBeReplacedWithUndefined: out.methodCallMustBeReplacedWithUndefined,
+ thisArgFunc: out.thisArgFunc,
+ thisArgWrapFunc: out.thisArgWrapFunc,
}
if !in.hasChainParent {
out.thisArgFunc = nil
@@ -13910,7 +14222,25 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}
case js_ast.UnOpVoid:
- if p.astHelpers.ExprCanBeRemovedIfUnused(e.Value) {
+ var shouldRemove bool
+ if p.options.minifySyntax {
+ shouldRemove = p.astHelpers.ExprCanBeRemovedIfUnused(e.Value)
+ } else {
+ // This special case was added for a very obscure reason. There's a
+ // custom dialect of JavaScript called Svelte that uses JavaScript
+ // syntax with different semantics. Specifically variable accesses
+ // have side effects (!). And someone wants to use "void x" instead
+ // of just "x" to trigger the side effect for some reason.
+ //
+ // Arguably this should not be supported, because you shouldn't be
+ // running esbuild on weird kinda-JavaScript-but-not languages and
+ // expecting it to work correctly. But this one special case seems
+ // harmless enough. This is definitely not fully supported though.
+ //
+ // More info: https://github.com/evanw/esbuild/issues/4041
+ shouldRemove = isUnsightlyPrimitive(e.Value.Data)
+ }
+ if shouldRemove {
return js_ast.Expr{Loc: expr.Loc, Data: js_ast.EUndefinedShared}, exprOut{}
}
@@ -14502,11 +14832,11 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
oldIsControlFlowDead := p.isControlFlowDead
// If we're removing this call, don't count any arguments as symbol uses
- if out.methodCallMustBeReplacedWithUndefined {
+ if out.callMustBeReplacedWithUndefined {
if js_ast.IsPropertyAccess(e.Target) {
p.isControlFlowDead = true
} else {
- out.methodCallMustBeReplacedWithUndefined = false
+ out.callMustBeReplacedWithUndefined = false
}
}
@@ -14578,7 +14908,7 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}
// Stop now if this call must be removed
- if out.methodCallMustBeReplacedWithUndefined {
+ if out.callMustBeReplacedWithUndefined {
p.isControlFlowDead = oldIsControlFlowDead
return js_ast.Expr{Loc: expr.Loc, Data: js_ast.EUndefinedShared}, exprOut{}
}
@@ -16233,7 +16563,7 @@ func (p *parser) handleIdentifier(loc logger.Loc, e *js_ast.EIdentifier, opts id
ref := e.Ref
// Substitute inlined constants
- if p.options.minifySyntax {
+ if p.options.minifySyntax && !p.currentScope.ContainsDirectEval {
if value, ok := p.constValues[ref]; ok {
p.ignoreUsage(ref)
return js_ast.ConstValueToExpr(loc, value)
@@ -16932,6 +17262,7 @@ func newParser(log logger.Log, source logger.Source, lexer js_lexer.Lexer, optio
runtimeImports: make(map[string]ast.LocRef),
promiseRef: ast.InvalidRef,
regExpRef: ast.InvalidRef,
+ bigIntRef: ast.InvalidRef,
afterArrowBodyLoc: logger.Loc{Start: -1},
firstJSXElementLoc: logger.Loc{Start: -1},
importMetaRef: ast.InvalidRef,
@@ -16959,6 +17290,12 @@ func newParser(log logger.Log, source logger.Source, lexer js_lexer.Lexer, optio
jsxRuntimeImports: make(map[string]ast.LocRef),
jsxLegacyImports: make(map[string]ast.LocRef),
+ // Add "/* @__KEY__ */" comments when mangling properties to support
+ // running esbuild (or other tools like Terser) again on the output.
+ // This checks both "--mangle-props" and "--reserve-props" so that
+ // you can turn this on with just "--reserve-props=." if you want to.
+ shouldAddKeyComment: options.mangleProps != nil || options.reserveProps != nil,
+
suppressWarningsAboutWeirdCode: helpers.IsInsideNodeModules(source.KeyPath.Text),
}
@@ -17379,7 +17716,7 @@ func GlobResolveAST(log logger.Log, source logger.Source, importRecords []ast.Im
return p.toAST([]js_ast.Part{nsExportPart}, []js_ast.Part{part}, nil, "", nil)
}
-func ParseDefineExprOrJSON(text string) (config.DefineExpr, js_ast.E) {
+func ParseDefineExpr(text string) (config.DefineExpr, js_ast.E) {
if text == "" {
return config.DefineExpr{}, nil
}
@@ -17405,16 +17742,18 @@ func ParseDefineExprOrJSON(text string) (config.DefineExpr, js_ast.E) {
return config.DefineExpr{Parts: parts}, nil
}
- // Try parsing a JSON value
+ // Try parsing a value
log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, nil)
- expr, ok := ParseJSON(log, logger.Source{Contents: text}, JSONOptions{})
+ expr, ok := ParseJSON(log, logger.Source{Contents: text}, JSONOptions{
+ IsForDefine: true,
+ })
if !ok {
return config.DefineExpr{}, nil
}
// Only primitive literals are inlined directly
switch expr.Data.(type) {
- case *js_ast.ENull, *js_ast.EBoolean, *js_ast.EString, *js_ast.ENumber:
+ case *js_ast.ENull, *js_ast.EBoolean, *js_ast.EString, *js_ast.ENumber, *js_ast.EBigInt:
return config.DefineExpr{Constant: expr.Data}, nil
}
@@ -17544,7 +17883,7 @@ func (p *parser) prepareForVisitPass() {
if p.options.jsx.AutomaticRuntime {
p.log.AddID(logger.MsgID_JS_UnsupportedJSXComment, logger.Warning, &p.tracker, jsxFactory.Range,
"The JSX factory cannot be set when using React's \"automatic\" JSX transform")
- } else if expr, _ := ParseDefineExprOrJSON(jsxFactory.Text); len(expr.Parts) > 0 {
+ } else if expr, _ := ParseDefineExpr(jsxFactory.Text); len(expr.Parts) > 0 {
p.options.jsx.Factory = expr
} else {
p.log.AddID(logger.MsgID_JS_UnsupportedJSXComment, logger.Warning, &p.tracker, jsxFactory.Range,
@@ -17556,7 +17895,7 @@ func (p *parser) prepareForVisitPass() {
if p.options.jsx.AutomaticRuntime {
p.log.AddID(logger.MsgID_JS_UnsupportedJSXComment, logger.Warning, &p.tracker, jsxFragment.Range,
"The JSX fragment cannot be set when using React's \"automatic\" JSX transform")
- } else if expr, _ := ParseDefineExprOrJSON(jsxFragment.Text); len(expr.Parts) > 0 || expr.Constant != nil {
+ } else if expr, _ := ParseDefineExpr(jsxFragment.Text); len(expr.Parts) > 0 || expr.Constant != nil {
p.options.jsx.Fragment = expr
} else {
p.log.AddID(logger.MsgID_JS_UnsupportedJSXComment, logger.Warning, &p.tracker, jsxFragment.Range,
diff --git a/internal/js_parser/js_parser_lower.go b/internal/js_parser/js_parser_lower.go
index 3991058e178..22ea92bd045 100644
--- a/internal/js_parser/js_parser_lower.go
+++ b/internal/js_parser/js_parser_lower.go
@@ -89,9 +89,13 @@ func (p *parser) markSyntaxFeature(feature compat.JSFeature, r logger.Range) (di
return
case compat.Bigint:
- // Transforming these will never be supported
- p.log.AddError(&p.tracker, r, fmt.Sprintf(
- "Big integer literals are not available in %s", where))
+ // This can't be polyfilled
+ kind := logger.Warning
+ if p.suppressWarningsAboutWeirdCode || p.fnOrArrowDataVisit.tryBodyCount > 0 {
+ kind = logger.Debug
+ }
+ p.log.AddID(logger.MsgID_JS_BigInt, kind, &p.tracker, r, fmt.Sprintf(
+ "Big integer literals are not available in %s and may crash at run-time", where))
return
case compat.ImportMeta:
diff --git a/internal/js_parser/js_parser_lower_class.go b/internal/js_parser/js_parser_lower_class.go
index 944b0e42509..396e3e21227 100644
--- a/internal/js_parser/js_parser_lower_class.go
+++ b/internal/js_parser/js_parser_lower_class.go
@@ -416,8 +416,7 @@ func (p *parser) computeClassLoweringInfo(class *js_ast.Class) (result classLowe
// due to the complexity of the decorator specification. The specification is
// also still evolving so trying to optimize it now is also potentially
// premature.
- if p.options.unsupportedJSFeatures.Has(compat.Decorators) &&
- (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) {
+ if class.ShouldLowerStandardDecorators {
for _, prop := range class.Properties {
if len(prop.Decorators) > 0 {
for _, prop := range class.Properties {
@@ -896,6 +895,11 @@ func (ctx *lowerClassContext) lowerField(
memberExpr = p.callRuntime(loc, "__privateAdd", args)
p.recordUsage(ref)
} else if private == nil && ctx.class.UseDefineForClassFields {
+ if p.shouldAddKeyComment {
+ if str, ok := prop.Key.Data.(*js_ast.EString); ok {
+ str.HasPropertyKeyComment = true
+ }
+ }
args := []js_ast.Expr{target, prop.Key}
if _, ok := init.Data.(*js_ast.EUndefined); !ok {
args = append(args, init)
@@ -1108,7 +1112,7 @@ func (ctx *lowerClassContext) analyzeProperty(p *parser, prop js_ast.Property, c
analysis.private, _ = prop.Key.Data.(*js_ast.EPrivateIdentifier)
mustLowerPrivate := analysis.private != nil && p.privateSymbolNeedsToBeLowered(analysis.private)
analysis.shouldOmitFieldInitializer = p.options.ts.Parse && !prop.Kind.IsMethodDefinition() && prop.InitializerOrNil.Data == nil &&
- !ctx.class.UseDefineForClassFields && !mustLowerPrivate
+ !ctx.class.UseDefineForClassFields && !mustLowerPrivate && !ctx.class.ShouldLowerStandardDecorators
// Class fields must be lowered if the environment doesn't support them
if !prop.Kind.IsMethodDefinition() {
@@ -1140,7 +1144,7 @@ func (ctx *lowerClassContext) analyzeProperty(p *parser, prop js_ast.Property, c
// they will end up being lowered (if they are even being lowered at all)
if p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators == config.True {
analysis.propExperimentalDecorators = prop.Decorators
- } else if p.options.unsupportedJSFeatures.Has(compat.Decorators) {
+ } else if ctx.class.ShouldLowerStandardDecorators {
analysis.propDecorators = prop.Decorators
}
@@ -1451,7 +1455,7 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla
propertyKeyTempRefs, decoratorTempRefs := ctx.hoistComputedProperties(p, classLoweringInfo)
// Save the initializer index for each field and accessor element
- if p.options.unsupportedJSFeatures.Has(compat.Decorators) && (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) {
+ if ctx.class.ShouldLowerStandardDecorators {
var counts [4]int
// Count how many initializers there are in each section
@@ -1484,8 +1488,7 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla
}
// Evaluate the decorator expressions inline
- if p.options.unsupportedJSFeatures.Has(compat.Decorators) && len(ctx.class.Decorators) > 0 &&
- (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) {
+ if ctx.class.ShouldLowerStandardDecorators && len(ctx.class.Decorators) > 0 {
name := ctx.nameToKeep
if name == "" {
name = "class"
@@ -2079,7 +2082,7 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass
if p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators == config.True {
classExperimentalDecorators = ctx.class.Decorators
ctx.class.Decorators = nil
- } else if p.options.unsupportedJSFeatures.Has(compat.Decorators) {
+ } else if ctx.class.ShouldLowerStandardDecorators {
classDecorators = ctx.decoratorClassDecorators
}
diff --git a/internal/js_parser/js_parser_lower_test.go b/internal/js_parser/js_parser_lower_test.go
index e6f1e2ccc2b..728ac39c975 100644
--- a/internal/js_parser/js_parser_lower_test.go
+++ b/internal/js_parser/js_parser_lower_test.go
@@ -722,6 +722,61 @@ func TestLowerOptionalCatchBinding(t *testing.T) {
expectPrintedTarget(t, 2018, "try {} catch {}", "try {\n} catch (e) {\n}\n")
}
+func TestLowerBigInt(t *testing.T) {
+ expectPrintedTarget(t, 2019, "x = 0n", "x = /* @__PURE__ */ BigInt(\"0\");\n")
+ expectPrintedTarget(t, 2020, "x = 0n", "x = 0n;\n")
+
+ expectPrintedTarget(t, 2019, "x = 0b100101n", "x = /* @__PURE__ */ BigInt(\"0b100101\");\n")
+ expectPrintedTarget(t, 2019, "x = 0B100101n", "x = /* @__PURE__ */ BigInt(\"0B100101\");\n")
+ expectPrintedTarget(t, 2019, "x = 0o76543210n", "x = /* @__PURE__ */ BigInt(\"0o76543210\");\n")
+ expectPrintedTarget(t, 2019, "x = 0O76543210n", "x = /* @__PURE__ */ BigInt(\"0O76543210\");\n")
+ expectPrintedTarget(t, 2019, "x = 0xFEDCBA9876543210n", "x = /* @__PURE__ */ BigInt(\"0xFEDCBA9876543210\");\n")
+ expectPrintedTarget(t, 2019, "x = 0XFEDCBA9876543210n", "x = /* @__PURE__ */ BigInt(\"0XFEDCBA9876543210\");\n")
+ expectPrintedTarget(t, 2019, "x = 0xb0ba_cafe_f00dn", "x = /* @__PURE__ */ BigInt(\"0xb0bacafef00d\");\n")
+ expectPrintedTarget(t, 2019, "x = 0xB0BA_CAFE_F00Dn", "x = /* @__PURE__ */ BigInt(\"0xB0BACAFEF00D\");\n")
+ expectPrintedTarget(t, 2019, "x = 102030405060708090807060504030201n", "x = /* @__PURE__ */ BigInt(\"102030405060708090807060504030201\");\n")
+
+ expectPrintedTarget(t, 2019, "x = {0b100101n: 0}", "x = { \"37\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0B100101n: 0}", "x = { \"37\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0o76543210n: 0}", "x = { \"16434824\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0O76543210n: 0}", "x = { \"16434824\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0xFEDCBA9876543210n: 0}", "x = { \"18364758544493064720\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0XFEDCBA9876543210n: 0}", "x = { \"18364758544493064720\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0xb0ba_cafe_f00dn: 0}", "x = { \"194316316110861\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {0xB0BA_CAFE_F00Dn: 0}", "x = { \"194316316110861\": 0 };\n")
+ expectPrintedTarget(t, 2019, "x = {102030405060708090807060504030201n: 0}", "x = { \"102030405060708090807060504030201\": 0 };\n")
+
+ expectPrintedTarget(t, 2019, "({0b100101n: x} = y)", "({ \"37\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0B100101n: x} = y)", "({ \"37\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0o76543210n: x} = y)", "({ \"16434824\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0O76543210n: x} = y)", "({ \"16434824\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0xFEDCBA9876543210n: x} = y)", "({ \"18364758544493064720\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0XFEDCBA9876543210n: x} = y)", "({ \"18364758544493064720\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0xb0ba_cafe_f00dn: x} = y)", "({ \"194316316110861\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({0xB0BA_CAFE_F00Dn: x} = y)", "({ \"194316316110861\": x } = y);\n")
+ expectPrintedTarget(t, 2019, "({102030405060708090807060504030201n: x} = y)", "({ \"102030405060708090807060504030201\": x } = y);\n")
+
+ expectPrintedMangleTarget(t, 2019, "x = {0b100101n: 0}", "x = { 37: 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0B100101n: 0}", "x = { 37: 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0o76543210n: 0}", "x = { 16434824: 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0O76543210n: 0}", "x = { 16434824: 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0xFEDCBA9876543210n: 0}", "x = { \"18364758544493064720\": 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0XFEDCBA9876543210n: 0}", "x = { \"18364758544493064720\": 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0xb0ba_cafe_f00dn: 0}", "x = { \"194316316110861\": 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {0xB0BA_CAFE_F00Dn: 0}", "x = { \"194316316110861\": 0 };\n")
+ expectPrintedMangleTarget(t, 2019, "x = {102030405060708090807060504030201n: 0}", "x = { \"102030405060708090807060504030201\": 0 };\n")
+
+ expectPrintedMangleTarget(t, 2019, "({0b100101n: x} = y)", "({ 37: x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0B100101n: x} = y)", "({ 37: x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0o76543210n: x} = y)", "({ 16434824: x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0O76543210n: x} = y)", "({ 16434824: x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0xFEDCBA9876543210n: x} = y)", "({ \"18364758544493064720\": x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0XFEDCBA9876543210n: x} = y)", "({ \"18364758544493064720\": x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0xb0ba_cafe_f00dn: x} = y)", "({ \"194316316110861\": x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({0xB0BA_CAFE_F00Dn: x} = y)", "({ \"194316316110861\": x } = y);\n")
+ expectPrintedMangleTarget(t, 2019, "({102030405060708090807060504030201n: x} = y)", "({ \"102030405060708090807060504030201\": x } = y);\n")
+}
+
func TestLowerExportStarAs(t *testing.T) {
expectPrintedTarget(t, 2020, "export * as ns from 'path'", "export * as ns from \"path\";\n")
expectPrintedTarget(t, 2019, "export * as ns from 'path'", "import * as ns from \"path\";\nexport { ns };\n")
diff --git a/internal/js_parser/js_parser_test.go b/internal/js_parser/js_parser_test.go
index 5efa385ab6d..91394fbead9 100644
--- a/internal/js_parser/js_parser_test.go
+++ b/internal/js_parser/js_parser_test.go
@@ -230,6 +230,13 @@ func expectPrintedJSXAutomatic(t *testing.T, options JSXAutomaticTestOptions, co
})
}
+func TestUnOp(t *testing.T) {
+ // This was important to someone for a very obscure reason. See
+ // https://github.com/evanw/esbuild/issues/4041 for more info.
+ expectPrinted(t, "let x; void 0; x", "let x;\nx;\n")
+ expectPrinted(t, "let x; void x; x", "let x;\nvoid x;\nx;\n")
+}
+
func TestBinOp(t *testing.T) {
for code, entry := range js_ast.OpTable {
opCode := js_ast.OpCode(code)
@@ -1372,6 +1379,14 @@ func TestObject(t *testing.T) {
expectPrintedMangle(t, "x = { '2147483648': y }", "x = { \"2147483648\": y };\n")
expectPrintedMangle(t, "x = { '-2147483648': y }", "x = { \"-2147483648\": y };\n")
expectPrintedMangle(t, "x = { '-2147483649': y }", "x = { \"-2147483649\": y };\n")
+
+ // See: https://github.com/microsoft/TypeScript/pull/60225
+ expectPrinted(t, "x = { get \n x() {} }", "x = { get x() {\n} };\n")
+ expectPrinted(t, "x = { set \n x(_) {} }", "x = { set x(_) {\n} };\n")
+ expectParseError(t, "x = { get \n *x() {} }", ": ERROR: Expected \"}\" but found \"*\"\n")
+ expectParseError(t, "x = { set \n *x(_) {} }", ": ERROR: Expected \"}\" but found \"*\"\n")
+ expectParseError(t, "x = { get \n async x() {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
+ expectParseError(t, "x = { set \n async x(_) {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
}
func TestComputedProperty(t *testing.T) {
@@ -1797,6 +1812,16 @@ func TestClass(t *testing.T) {
// Make sure direct "eval" doesn't cause the class name to change
expectPrinted(t, "class Foo { foo = [Foo, eval(bar)] }", "class Foo {\n foo = [Foo, eval(bar)];\n}\n")
+
+ // See: https://github.com/microsoft/TypeScript/pull/60225
+ expectPrinted(t, "class A { get \n x() {} }", "class A {\n get x() {\n }\n}\n")
+ expectPrinted(t, "class A { set \n x(_) {} }", "class A {\n set x(_) {\n }\n}\n")
+ expectPrinted(t, "class A { get \n *x() {} }", "class A {\n get;\n *x() {\n }\n}\n")
+ expectPrinted(t, "class A { set \n *x(_) {} }", "class A {\n set;\n *x(_) {\n }\n}\n")
+ expectParseError(t, "class A { get \n async x() {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
+ expectParseError(t, "class A { set \n async x(_) {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
+ expectParseError(t, "class A { async get \n *x() {} }", ": ERROR: Expected \"(\" but found \"*\"\n")
+ expectParseError(t, "class A { async set \n *x(_) {} }", ": ERROR: Expected \"(\" but found \"*\"\n")
}
func TestSuperCall(t *testing.T) {
@@ -2185,8 +2210,8 @@ __privateAdd(Foo, _x, __runInitializers(_init, 8, Foo)), __runInitializers(_init
func TestGenerator(t *testing.T) {
expectParseError(t, "(class { * foo })", ": ERROR: Expected \"(\" but found \"}\"\n")
expectParseError(t, "(class { * *foo() {} })", ": ERROR: Unexpected \"*\"\n")
- expectParseError(t, "(class { get*foo() {} })", ": ERROR: Unexpected \"*\"\n")
- expectParseError(t, "(class { set*foo() {} })", ": ERROR: Unexpected \"*\"\n")
+ expectParseError(t, "(class { get*foo() {} })", ": ERROR: Expected \";\" but found \"*\"\n")
+ expectParseError(t, "(class { set*foo() {} })", ": ERROR: Expected \";\" but found \"*\"\n")
expectParseError(t, "(class { *get foo() {} })", ": ERROR: Expected \"(\" but found \"foo\"\n")
expectParseError(t, "(class { *set foo() {} })", ": ERROR: Expected \"(\" but found \"foo\"\n")
expectParseError(t, "(class { *static foo() {} })", ": ERROR: Expected \"(\" but found \"foo\"\n")
@@ -2751,6 +2776,43 @@ func TestSwitch(t *testing.T) {
expectPrinted(t, "switch (x) { default: }", "switch (x) {\n default:\n}\n")
expectPrinted(t, "switch ((x => x + 1)(0)) { case 1: var y } y = 2", "switch (((x) => x + 1)(0)) {\n case 1:\n var y;\n}\ny = 2;\n")
expectParseError(t, "switch (x) { default: default: }", ": ERROR: Multiple default clauses are not allowed\n")
+
+ expectPrintedMangle(t, "switch (x) {}", "x;\n")
+ expectPrintedMangle(t, "switch (x) { case x: a(); break; case y: b(); break }", "switch (x) {\n case x:\n a();\n break;\n case y:\n b();\n break;\n}\n")
+
+ expectPrintedMangle(t, "switch (0) { default: a() }", "a();\n")
+ expectPrintedMangle(t, "switch (x) { default: a() }", "switch (x) {\n default:\n a();\n}\n")
+
+ expectPrintedMangle(t, "switch (0) { case 0: a(); break; case 1: b(); break }", "a();\n")
+ expectPrintedMangle(t, "switch (1) { case 0: a(); break; case 1: b(); break }", "b();\n")
+ expectPrintedMangle(t, "switch (2) { case 0: a(); break; case 1: b(); break }", "")
+
+ expectPrintedMangle(t, "switch (0) { case 0: a(); case 1: b(); break }", "switch (0) {\n case 0:\n a();\n case 1:\n b();\n break;\n}\n")
+ expectPrintedMangle(t, "switch (1) { case 0: a(); case 1: b(); break }", "b();\n")
+ expectPrintedMangle(t, "switch (2) { case 0: a(); case 1: b(); break }", "")
+
+ expectPrintedMangle(t, "switch (0) { case 0: a(); break; default: b(); break }", "a();\n")
+ expectPrintedMangle(t, "switch (1) { case 0: a(); break; default: b(); break }", "b();\n")
+
+ expectPrintedMangle(t, "switch (0) { case 0: { a(); break; } case 1: b(); break }", "a();\n")
+ expectPrintedMangle(t, "switch (0) { case 0: { var x = a(); break; } case 1: b(); break }", "var x = a();\n")
+ expectPrintedMangle(t, "switch (0) { case 0: { let x = a(); break; } case 1: b(); break }", "{\n let x = a();\n}\n")
+ expectPrintedMangle(t, "switch (0) { case 0: { const x = a(); break; } case 1: b(); break }", "{\n const x = a();\n}\n")
+
+ expectPrintedMangle(t, "for (x of y) switch (0) { case 0: a(); continue; default: b(); continue }", "for (x of y) a();\n")
+ expectPrintedMangle(t, "for (x of y) switch (1) { case 0: a(); continue; default: b(); continue }", "for (x of y) b();\n")
+
+ expectPrintedMangle(t, "for (x of y) switch (0) { case 0: throw a(); default: throw b() }", "for (x of y) throw a();\n")
+ expectPrintedMangle(t, "for (x of y) switch (1) { case 0: throw a(); default: throw b() }", "for (x of y) throw b();\n")
+
+ expectPrintedMangle(t, "for (x of y) switch (0) { case 0: return a(); default: return b() }", "for (x of y) return a();\n")
+ expectPrintedMangle(t, "for (x of y) switch (1) { case 0: return a(); default: return b() }", "for (x of y) return b();\n")
+
+ expectPrintedMangle(t, "z: for (x of y) switch (0) { case 0: a(); break z; default: b(); break z }", "z: for (x of y) switch (0) {\n case 0:\n a();\n break z;\n}\n")
+ expectPrintedMangle(t, "z: for (x of y) switch (1) { case 0: a(); break z; default: b(); break z }", "z: for (x of y) switch (1) {\n default:\n b();\n break z;\n}\n")
+
+ expectPrintedMangle(t, "for (x of y) z: switch (0) { case 0: a(); break z; default: b(); break z }", "for (x of y) z: switch (0) {\n case 0:\n a();\n break z;\n}\n")
+ expectPrintedMangle(t, "for (x of y) z: switch (1) { case 0: a(); break z; default: b(); break z }", "for (x of y) z: switch (1) {\n default:\n b();\n break z;\n}\n")
}
func TestConstantFolding(t *testing.T) {
@@ -2835,14 +2897,14 @@ func TestConstantFolding(t *testing.T) {
expectPrinted(t, "x = 1 === 1", "x = true;\n")
expectPrinted(t, "x = 1 === 2", "x = false;\n")
- expectPrinted(t, "x = 1 === '1'", "x = 1 === \"1\";\n")
+ expectPrinted(t, "x = 1 === '1'", "x = false;\n")
expectPrinted(t, "x = 1 == 1", "x = true;\n")
expectPrinted(t, "x = 1 == 2", "x = false;\n")
expectPrinted(t, "x = 1 == '1'", "x = 1 == \"1\";\n")
expectPrinted(t, "x = 1 !== 1", "x = false;\n")
expectPrinted(t, "x = 1 !== 2", "x = true;\n")
- expectPrinted(t, "x = 1 !== '1'", "x = 1 !== \"1\";\n")
+ expectPrinted(t, "x = 1 !== '1'", "x = true;\n")
expectPrinted(t, "x = 1 != 1", "x = false;\n")
expectPrinted(t, "x = 1 != 2", "x = true;\n")
expectPrinted(t, "x = 1 != '1'", "x = 1 != \"1\";\n")
@@ -2907,6 +2969,8 @@ func TestConstantFolding(t *testing.T) {
expectPrinted(t, "x = 0n !== 1n", "x = true;\n")
expectPrinted(t, "x = 0n !== 0n", "x = false;\n")
expectPrinted(t, "x = 123n === 1_2_3n", "x = true;\n")
+ expectPrinted(t, "x = 0n === '1n'", "x = false;\n")
+ expectPrinted(t, "x = 0n !== '1n'", "x = true;\n")
expectPrinted(t, "x = 0n === 0b0n", "x = 0n === 0b0n;\n")
expectPrinted(t, "x = 0n === 0o0n", "x = 0n === 0o0n;\n")
@@ -3729,8 +3793,12 @@ func TestMangleBooleanConstructor(t *testing.T) {
expectPrintedNormalAndMangle(t, "a = Boolean(b ? c > 0 : c < 0)", "a = Boolean(b ? c > 0 : c < 0);\n", "a = b ? c > 0 : c < 0;\n")
// Check for calling "SimplifyBooleanExpr" on the argument
- expectPrintedNormalAndMangle(t, "a = Boolean((b | c) !== 0)", "a = Boolean((b | c) !== 0);\n", "a = !!(b | c);\n")
- expectPrintedNormalAndMangle(t, "a = Boolean(b ? (c | d) !== 0 : (d | e) !== 0)", "a = Boolean(b ? (c | d) !== 0 : (d | e) !== 0);\n", "a = !!(b ? c | d : d | e);\n")
+ expectPrintedNormalAndMangle(t, "a = Boolean((b | c) !== 0)", "a = Boolean((b | c) !== 0);\n", "a = (b | c) !== 0;\n")
+ expectPrintedNormalAndMangle(t, "a = Boolean((b >>> c) !== 0)", "a = Boolean(b >>> c !== 0);\n", "a = !!(b >>> c);\n")
+ expectPrintedNormalAndMangle(t, "a = Boolean(b ? (c | d) !== 0 : (d | e) !== 0)",
+ "a = Boolean(b ? (c | d) !== 0 : (d | e) !== 0);\n", "a = b ? (c | d) !== 0 : (d | e) !== 0;\n")
+ expectPrintedNormalAndMangle(t, "a = Boolean(b ? (c >>> d) !== 0 : (d >>> e) !== 0)",
+ "a = Boolean(b ? c >>> d !== 0 : d >>> e !== 0);\n", "a = !!(b ? c >>> d : d >>> e);\n")
}
func TestMangleNumberConstructor(t *testing.T) {
@@ -4077,44 +4145,44 @@ func TestMangleIf(t *testing.T) {
expectPrintedNormalAndMangle(t, "if (!!a ? !!b : !!c) throw 0", "if (!!a ? !!b : !!c) throw 0;\n", "if (a ? b : c) throw 0;\n")
expectPrintedNormalAndMangle(t, "if ((a + b) !== 0) throw 0", "if (a + b !== 0) throw 0;\n", "if (a + b !== 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a | b) !== 0) throw 0", "if ((a | b) !== 0) throw 0;\n", "if (a | b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a & b) !== 0) throw 0", "if ((a & b) !== 0) throw 0;\n", "if (a & b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a ^ b) !== 0) throw 0", "if ((a ^ b) !== 0) throw 0;\n", "if (a ^ b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a << b) !== 0) throw 0", "if (a << b !== 0) throw 0;\n", "if (a << b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a >> b) !== 0) throw 0", "if (a >> b !== 0) throw 0;\n", "if (a >> b) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a | b) !== 0) throw 0", "if ((a | b) !== 0) throw 0;\n", "if ((a | b) !== 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a & b) !== 0) throw 0", "if ((a & b) !== 0) throw 0;\n", "if ((a & b) !== 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a ^ b) !== 0) throw 0", "if ((a ^ b) !== 0) throw 0;\n", "if ((a ^ b) !== 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a << b) !== 0) throw 0", "if (a << b !== 0) throw 0;\n", "if (a << b !== 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a >> b) !== 0) throw 0", "if (a >> b !== 0) throw 0;\n", "if (a >> b !== 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if ((a >>> b) !== 0) throw 0", "if (a >>> b !== 0) throw 0;\n", "if (a >>> b) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (+a !== 0) throw 0", "if (+a !== 0) throw 0;\n", "if (+a != 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (~a !== 0) throw 0", "if (~a !== 0) throw 0;\n", "if (~a) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (~a !== 0) throw 0", "if (~a !== 0) throw 0;\n", "if (~a !== 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 != (a + b)) throw 0", "if (0 != a + b) throw 0;\n", "if (a + b != 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != (a | b)) throw 0", "if (0 != (a | b)) throw 0;\n", "if (a | b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != (a & b)) throw 0", "if (0 != (a & b)) throw 0;\n", "if (a & b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != (a ^ b)) throw 0", "if (0 != (a ^ b)) throw 0;\n", "if (a ^ b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != (a << b)) throw 0", "if (0 != a << b) throw 0;\n", "if (a << b) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != (a >> b)) throw 0", "if (0 != a >> b) throw 0;\n", "if (a >> b) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != (a | b)) throw 0", "if (0 != (a | b)) throw 0;\n", "if ((a | b) != 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != (a & b)) throw 0", "if (0 != (a & b)) throw 0;\n", "if ((a & b) != 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != (a ^ b)) throw 0", "if (0 != (a ^ b)) throw 0;\n", "if ((a ^ b) != 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != (a << b)) throw 0", "if (0 != a << b) throw 0;\n", "if (a << b != 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != (a >> b)) throw 0", "if (0 != a >> b) throw 0;\n", "if (a >> b != 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 != (a >>> b)) throw 0", "if (0 != a >>> b) throw 0;\n", "if (a >>> b) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 != +a) throw 0", "if (0 != +a) throw 0;\n", "if (+a != 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 != ~a) throw 0", "if (0 != ~a) throw 0;\n", "if (~a) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 != ~a) throw 0", "if (0 != ~a) throw 0;\n", "if (~a != 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if ((a + b) === 0) throw 0", "if (a + b === 0) throw 0;\n", "if (a + b === 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a | b) === 0) throw 0", "if ((a | b) === 0) throw 0;\n", "if (!(a | b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a & b) === 0) throw 0", "if ((a & b) === 0) throw 0;\n", "if (!(a & b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a ^ b) === 0) throw 0", "if ((a ^ b) === 0) throw 0;\n", "if (!(a ^ b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a << b) === 0) throw 0", "if (a << b === 0) throw 0;\n", "if (!(a << b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if ((a >> b) === 0) throw 0", "if (a >> b === 0) throw 0;\n", "if (!(a >> b)) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a | b) === 0) throw 0", "if ((a | b) === 0) throw 0;\n", "if ((a | b) === 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a & b) === 0) throw 0", "if ((a & b) === 0) throw 0;\n", "if ((a & b) === 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a ^ b) === 0) throw 0", "if ((a ^ b) === 0) throw 0;\n", "if ((a ^ b) === 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a << b) === 0) throw 0", "if (a << b === 0) throw 0;\n", "if (a << b === 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if ((a >> b) === 0) throw 0", "if (a >> b === 0) throw 0;\n", "if (a >> b === 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if ((a >>> b) === 0) throw 0", "if (a >>> b === 0) throw 0;\n", "if (!(a >>> b)) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (+a === 0) throw 0", "if (+a === 0) throw 0;\n", "if (+a == 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (~a === 0) throw 0", "if (~a === 0) throw 0;\n", "if (!~a) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (~a === 0) throw 0", "if (~a === 0) throw 0;\n", "if (~a === 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 == (a + b)) throw 0", "if (0 == a + b) throw 0;\n", "if (a + b == 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == (a | b)) throw 0", "if (0 == (a | b)) throw 0;\n", "if (!(a | b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == (a & b)) throw 0", "if (0 == (a & b)) throw 0;\n", "if (!(a & b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == (a ^ b)) throw 0", "if (0 == (a ^ b)) throw 0;\n", "if (!(a ^ b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == (a << b)) throw 0", "if (0 == a << b) throw 0;\n", "if (!(a << b)) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == (a >> b)) throw 0", "if (0 == a >> b) throw 0;\n", "if (!(a >> b)) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == (a | b)) throw 0", "if (0 == (a | b)) throw 0;\n", "if ((a | b) == 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == (a & b)) throw 0", "if (0 == (a & b)) throw 0;\n", "if ((a & b) == 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == (a ^ b)) throw 0", "if (0 == (a ^ b)) throw 0;\n", "if ((a ^ b) == 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == (a << b)) throw 0", "if (0 == a << b) throw 0;\n", "if (a << b == 0) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == (a >> b)) throw 0", "if (0 == a >> b) throw 0;\n", "if (a >> b == 0) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 == (a >>> b)) throw 0", "if (0 == a >>> b) throw 0;\n", "if (!(a >>> b)) throw 0;\n")
expectPrintedNormalAndMangle(t, "if (0 == +a) throw 0", "if (0 == +a) throw 0;\n", "if (+a == 0) throw 0;\n")
- expectPrintedNormalAndMangle(t, "if (0 == ~a) throw 0", "if (0 == ~a) throw 0;\n", "if (!~a) throw 0;\n")
+ expectPrintedNormalAndMangle(t, "if (0 == ~a) throw 0", "if (0 == ~a) throw 0;\n", "if (~a == 0) throw 0;\n")
}
func TestMangleWrapToAvoidAmbiguousElse(t *testing.T) {
@@ -6530,6 +6598,27 @@ func TestMangleCatch(t *testing.T) {
expectPrintedMangle(t, "if (y) try { throw 1 } catch (x) {} else eval('x')", "if (y) try {\n throw 1;\n} catch {\n}\nelse eval(\"x\");\n")
}
+func TestMangleTry(t *testing.T) {
+ expectPrintedMangle(t, "try { throw 0 } catch (e) { foo() }", "try {\n throw 0;\n} catch {\n foo();\n}\n")
+ expectPrintedMangle(t, "try {} catch (e) { var foo }", "try {\n} catch {\n var foo;\n}\n")
+
+ expectPrintedMangle(t, "try {} catch (e) { foo() }", "")
+ expectPrintedMangle(t, "try {} catch (e) { foo() } finally {}", "")
+
+ expectPrintedMangle(t, "try {} finally { foo() }", "foo();\n")
+ expectPrintedMangle(t, "try {} catch (e) { foo() } finally { bar() }", "bar();\n")
+
+ expectPrintedMangle(t, "try {} finally { var x = foo() }", "var x = foo();\n")
+ expectPrintedMangle(t, "try {} catch (e) { foo() } finally { var x = bar() }", "var x = bar();\n")
+
+ expectPrintedMangle(t, "try {} finally { let x = foo() }", "{\n let x = foo();\n}\n")
+ expectPrintedMangle(t, "try {} catch (e) { foo() } finally { let x = bar() }", "{\n let x = bar();\n}\n")
+
+ // The Kotlin compiler apparently generates code like this.
+ // See https://github.com/evanw/esbuild/issues/4064 for info.
+ expectPrintedMangle(t, "x: try { while (true) ; break x } catch {}", "x: try {\n for (; ; ) ;\n break x;\n} catch {\n}\n")
+}
+
func TestAutoPureForObjectCreate(t *testing.T) {
expectPrinted(t, "Object.create(null)", "/* @__PURE__ */ Object.create(null);\n")
expectPrinted(t, "Object.create({})", "/* @__PURE__ */ Object.create({});\n")
diff --git a/internal/js_parser/json_parser.go b/internal/js_parser/json_parser.go
index 64062caa007..0f8c79a9ed5 100644
--- a/internal/js_parser/json_parser.go
+++ b/internal/js_parser/json_parser.go
@@ -165,6 +165,14 @@ func (p *jsonParser) parseExpr() js_ast.Expr {
CloseBraceLoc: closeBraceLoc,
}}
+ case js_lexer.TBigIntegerLiteral:
+ if !p.options.IsForDefine {
+ p.lexer.Unexpected()
+ }
+ value := p.lexer.Identifier
+ p.lexer.Next()
+ return js_ast.Expr{Loc: loc, Data: &js_ast.EBigInt{Value: value.String}}
+
default:
p.lexer.Unexpected()
return js_ast.Expr{}
@@ -175,6 +183,7 @@ type JSONOptions struct {
UnsupportedJSFeatures compat.JSFeature
Flavor js_lexer.JSONFlavor
ErrorSuffix string
+ IsForDefine bool
}
func ParseJSON(log logger.Log, source logger.Source, options JSONOptions) (result js_ast.Expr, ok bool) {
diff --git a/internal/js_parser/sourcemap_parser.go b/internal/js_parser/sourcemap_parser.go
index c83d76774d8..9a39758925a 100644
--- a/internal/js_parser/sourcemap_parser.go
+++ b/internal/js_parser/sourcemap_parser.go
@@ -2,7 +2,9 @@ package js_parser
import (
"fmt"
+ "net/url"
"sort"
+ "strings"
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/helpers"
@@ -26,10 +28,12 @@ func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
}
var sources []string
+ var sourcesArray []js_ast.Expr
var sourcesContent []sourcemap.SourceContent
var names []string
var mappingsRaw []uint16
var mappingsStart int32
+ var sourceRoot string
hasVersion := false
for _, prop := range obj.Properties {
@@ -51,14 +55,18 @@ func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
mappingsStart = prop.ValueOrNil.Loc.Start + 1
}
+ case "sourceRoot":
+ if value, ok := prop.ValueOrNil.Data.(*js_ast.EString); ok {
+ sourceRoot = helpers.UTF16ToString(value.Value)
+ }
+
case "sources":
if value, ok := prop.ValueOrNil.Data.(*js_ast.EArray); ok {
- sources = []string{}
- for _, item := range value.Items {
+ sources = make([]string, len(value.Items))
+ sourcesArray = value.Items
+ for i, item := range value.Items {
if element, ok := item.Data.(*js_ast.EString); ok {
- sources = append(sources, helpers.UTF16ToString(element.Value))
- } else {
- sources = append(sources, "")
+ sources[i] = helpers.UTF16ToString(element.Value)
}
}
}
@@ -256,6 +264,44 @@ func ParseSourceMap(log logger.Log, source logger.Source) *sourcemap.SourceMap {
sort.Stable(mappings)
}
+ // Try resolving relative source URLs into absolute source URLs.
+ // See https://tc39.es/ecma426/#resolving-sources for details.
+ var sourceURLPrefix string
+ var baseURL *url.URL
+ if sourceRoot != "" {
+ if index := strings.LastIndexByte(sourceRoot, '/'); index != -1 {
+ sourceURLPrefix = sourceRoot[:index+1]
+ } else {
+ sourceURLPrefix = sourceRoot + "/"
+ }
+ }
+ if source.KeyPath.Namespace == "file" {
+ baseURL = helpers.FileURLFromFilePath(source.KeyPath.Text)
+ }
+ for i, sourcePath := range sources {
+ if sourcePath == "" {
+ continue // Skip null entries
+ }
+ sourcePath = sourceURLPrefix + sourcePath
+ sourceURL, err := url.Parse(sourcePath)
+
+ // Report URL parse errors (such as "%XY" being an invalid escape)
+ if err != nil {
+ if urlErr, ok := err.(*url.Error); ok {
+ err = urlErr.Err // Use the underlying error to reduce noise
+ }
+ log.AddID(logger.MsgID_SourceMap_InvalidSourceURL, logger.Warning, &tracker, source.RangeOfString(sourcesArray[i].Loc),
+ fmt.Sprintf("Invalid source URL: %s", err.Error()))
+ continue
+ }
+
+ // Resolve this URL relative to the enclosing directory
+ if baseURL != nil {
+ sourceURL = baseURL.ResolveReference(sourceURL)
+ }
+ sources[i] = sourceURL.String()
+ }
+
return &sourcemap.SourceMap{
Sources: sources,
SourcesContent: sourcesContent,
diff --git a/internal/js_parser/ts_parser_test.go b/internal/js_parser/ts_parser_test.go
index 20b8483dfb7..5ba653b7969 100644
--- a/internal/js_parser/ts_parser_test.go
+++ b/internal/js_parser/ts_parser_test.go
@@ -820,6 +820,16 @@ func TestTSClass(t *testing.T) {
expectParseErrorTS(t, "class Foo { [foo] }", ": ERROR: Expected \"(\" but found \"}\"\n")
expectParseErrorTS(t, "class Foo { [foo]? }", ": ERROR: Expected \"(\" but found \"}\"\n")
expectParseErrorTS(t, "class Foo { [foo]!() {} }", ": ERROR: Expected \";\" but found \"<\"\n")
+
+ // See: https://github.com/microsoft/TypeScript/pull/60225
+ expectPrintedTS(t, "class A { get \n x() {} }", "class A {\n get x() {\n }\n}\n")
+ expectPrintedTS(t, "class A { set \n x(_) {} }", "class A {\n set x(_) {\n }\n}\n")
+ expectPrintedTS(t, "class A { get \n *x() {} }", "class A {\n get;\n *x() {\n }\n}\n")
+ expectPrintedTS(t, "class A { set \n *x(_) {} }", "class A {\n set;\n *x(_) {\n }\n}\n")
+ expectParseErrorTS(t, "class A { get \n async x() {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
+ expectParseErrorTS(t, "class A { set \n async x(_) {} }", ": ERROR: Expected \"(\" but found \"x\"\n")
+ expectParseErrorTS(t, "class A { async get \n *x() {} }", ": ERROR: Expected \"(\" but found \"*\"\n")
+ expectParseErrorTS(t, "class A { async set \n *x(_) {} }", ": ERROR: Expected \"(\" but found \"*\"\n")
}
func TestTSAutoAccessors(t *testing.T) {
@@ -2297,7 +2307,7 @@ func TestTSArrow(t *testing.T) {
expectPrintedTS(t, "async (): void => {}", "async () => {\n};\n")
expectPrintedTS(t, "async (a): void => {}", "async (a) => {\n};\n")
- expectParseErrorTS(t, "async x: void => {}", ": ERROR: Expected \"=>\" but found \":\"\n")
+ expectParseErrorTS(t, "async x: void => {}", ": ERROR: Expected \";\" but found \"x\"\n")
expectPrintedTS(t, "function foo(x: boolean): asserts x", "")
expectPrintedTS(t, "function foo(x: boolean): asserts", "")
@@ -2321,6 +2331,11 @@ func TestTSArrow(t *testing.T) {
expectParseErrorTargetTS(t, 5, "return check ? (hover = 2, bar) : baz()", "")
expectParseErrorTargetTS(t, 5, "return check ? (hover = 2, bar) => 0 : baz()",
": ERROR: Transforming default arguments to the configured target environment is not supported yet\n")
+
+ // https://github.com/evanw/esbuild/issues/4027
+ expectPrintedTS(t, "function f(async?) { g(async in x) }", "function f(async) {\n g(async in x);\n}\n")
+ expectPrintedTS(t, "function f(async?) { g(async as boolean) }", "function f(async) {\n g(async);\n}\n")
+ expectPrintedTS(t, "function f() { g(async as => boolean) }", "function f() {\n g(async (as) => boolean);\n}\n")
}
func TestTSSuperCall(t *testing.T) {
diff --git a/internal/js_printer/js_printer.go b/internal/js_printer/js_printer.go
index 3c5cab04571..b552fdc270b 100644
--- a/internal/js_printer/js_printer.go
+++ b/internal/js_printer/js_printer.go
@@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"math"
+ "math/big"
"strconv"
"strings"
"unicode/utf8"
@@ -719,7 +720,8 @@ func (p *printer) printBinding(binding js_ast.Binding) {
p.addSourceMapping(property.Key.Loc)
p.printIdentifierUTF16(str.Value)
} else if mangled, ok := property.Key.Data.(*js_ast.ENameOfSymbol); ok {
- if name := p.mangledPropName(mangled.Ref); p.canPrintIdentifier(name) {
+ name := p.mangledPropName(mangled.Ref)
+ if p.canPrintIdentifier(name) {
p.addSourceMappingForName(property.Key.Loc, name, mangled.Ref)
p.printIdentifier(name)
@@ -1204,7 +1206,8 @@ func (p *printer) printProperty(property js_ast.Property) {
p.printIdentifier(name)
case *js_ast.ENameOfSymbol:
- if name := p.mangledPropName(key.Ref); p.canPrintIdentifier(name) {
+ name := p.mangledPropName(key.Ref)
+ if p.canPrintIdentifier(name) {
p.printSpaceBeforeIdentifier()
p.addSourceMappingForName(property.Key.Loc, name, key.Ref)
p.printIdentifier(name)
@@ -2933,7 +2936,10 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla
var inlinedValue js_ast.E
switch e2 := part.Value.Data.(type) {
case *js_ast.ENameOfSymbol:
- inlinedValue = &js_ast.EString{Value: helpers.StringToUTF16(p.mangledPropName(e2.Ref))}
+ inlinedValue = &js_ast.EString{
+ Value: helpers.StringToUTF16(p.mangledPropName(e2.Ref)),
+ HasPropertyKeyComment: e2.HasPropertyKeyComment,
+ }
case *js_ast.EDot:
if value, ok := p.tryToGetImportedEnumValue(e2.Target, e2.Name); ok {
if value.String != nil {
@@ -3043,10 +3049,72 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla
}
case *js_ast.EBigInt:
+ if !p.options.UnsupportedFeatures.Has(compat.Bigint) {
+ p.printSpaceBeforeIdentifier()
+ p.addSourceMapping(expr.Loc)
+ p.print(e.Value)
+ p.print("n")
+ break
+ }
+
+ wrap := level >= js_ast.LNew || (flags&forbidCall) != 0
+ hasPureComment := !p.options.MinifyWhitespace
+
+ if hasPureComment && level >= js_ast.LPostfix {
+ wrap = true
+ }
+
+ if wrap {
+ p.print("(")
+ }
+
+ if hasPureComment {
+ flags := p.saveExprStartFlags()
+ p.addSourceMapping(expr.Loc)
+ p.print("/* @__PURE__ */ ")
+ p.restoreExprStartFlags(flags)
+ }
+
+ value := e.Value
+ useQuotes := true
+
+ // When minifying, try to convert to a shorter form
+ if p.options.MinifySyntax {
+ var i big.Int
+ fmt.Sscan(value, &i)
+ str := i.String()
+
+ // Print without quotes if it can be converted exactly
+ if num, err := strconv.ParseFloat(str, 64); err == nil && str == fmt.Sprintf("%.0f", num) {
+ useQuotes = false
+ }
+
+ // Print the converted form if it's shorter (long hex strings may not be shorter)
+ if len(str) < len(value) {
+ value = str
+ }
+ }
+
p.printSpaceBeforeIdentifier()
p.addSourceMapping(expr.Loc)
- p.print(e.Value)
- p.print("n")
+
+ if useQuotes {
+ p.print("BigInt(\"")
+ } else {
+ p.print("BigInt(")
+ }
+
+ p.print(value)
+
+ if useQuotes {
+ p.print("\")")
+ } else {
+ p.print(")")
+ }
+
+ if wrap {
+ p.print(")")
+ }
case *js_ast.ENumber:
p.addSourceMapping(expr.Loc)
diff --git a/internal/js_printer/js_printer_test.go b/internal/js_printer/js_printer_test.go
index a17eb1792ae..13c20c88fd6 100644
--- a/internal/js_printer/js_printer_test.go
+++ b/internal/js_printer/js_printer_test.go
@@ -22,6 +22,9 @@ func expectPrintedCommon(t *testing.T, name string, contents string, expected st
msgs := log.Done()
text := ""
for _, msg := range msgs {
+ if msg.Kind != logger.Error {
+ continue
+ }
text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{})
}
test.AssertEqualWithDiff(t, text, "")
@@ -1150,3 +1153,15 @@ func TestUsing(t *testing.T) {
expectPrintedMinify(t, "await using x = y", "await using x=y;")
expectPrintedMinify(t, "await using x = y, z = _", "await using x=y,z=_;")
}
+
+func TestMinifyBigInt(t *testing.T) {
+ expectPrintedTargetMangle(t, 2019, "x = 0b100101n", "x = /* @__PURE__ */ BigInt(37);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0B100101n", "x = /* @__PURE__ */ BigInt(37);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0o76543210n", "x = /* @__PURE__ */ BigInt(16434824);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0O76543210n", "x = /* @__PURE__ */ BigInt(16434824);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0xFEDCBA9876543210n", "x = /* @__PURE__ */ BigInt(\"0xFEDCBA9876543210\");\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0XFEDCBA9876543210n", "x = /* @__PURE__ */ BigInt(\"0XFEDCBA9876543210\");\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0xb0ba_cafe_f00dn", "x = /* @__PURE__ */ BigInt(0xb0bacafef00d);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 0xB0BA_CAFE_F00Dn", "x = /* @__PURE__ */ BigInt(0xB0BACAFEF00D);\n")
+ expectPrintedTargetMangle(t, 2019, "x = 102030405060708090807060504030201n", "x = /* @__PURE__ */ BigInt(\"102030405060708090807060504030201\");\n")
+}
diff --git a/internal/linker/linker.go b/internal/linker/linker.go
index 219fe8e614b..e37ed9a784d 100644
--- a/internal/linker/linker.go
+++ b/internal/linker/linker.go
@@ -13,6 +13,7 @@ import (
"encoding/binary"
"fmt"
"hash"
+ "net/url"
"path"
"sort"
"strconv"
@@ -687,7 +688,7 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
})
// Generate the optional legal comments file for this chunk
- if chunk.externalLegalComments != nil {
+ if len(chunk.externalLegalComments) > 0 {
finalRelPathForLegalComments := chunk.finalRelPath + ".LEGAL.txt"
// Link the file to the legal comments
@@ -719,10 +720,11 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
case config.SourceMapLinkedWithComment:
importPath := c.pathBetweenChunks(finalRelDir, finalRelPathForSourceMap)
importPath = strings.TrimPrefix(importPath, "./")
+ importURL := url.URL{Path: importPath}
outputContentsJoiner.EnsureNewlineAtEnd()
outputContentsJoiner.AddString(commentPrefix)
outputContentsJoiner.AddString("# sourceMappingURL=")
- outputContentsJoiner.AddString(importPath)
+ outputContentsJoiner.AddString(importURL.EscapedPath())
outputContentsJoiner.AddString(commentSuffix)
outputContentsJoiner.AddString("\n")
@@ -5849,6 +5851,16 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
// Ignore empty source map chunks
if compileResult.SourceMapChunk.ShouldIgnore {
prevOffset.AdvanceBytes(compileResult.JS)
+
+ // Include a null entry in the source map
+ if len(compileResult.JS) > 0 && c.options.SourceMap != config.SourceMapNone {
+ if n := len(compileResultsForSourceMap); n > 0 && !compileResultsForSourceMap[n-1].isNullEntry {
+ compileResultsForSourceMap = append(compileResultsForSourceMap, compileResultForSourceMap{
+ sourceIndex: compileResult.sourceIndex,
+ isNullEntry: true,
+ })
+ }
+ }
} else {
prevOffset = sourcemap.LineColumnOffset{}
@@ -5963,7 +5975,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
func (c *linkerContext) generateGlobalNamePrefix() string {
var text string
globalName := c.options.GlobalName
- prefix := globalName[0]
+ prefix, globalName := globalName[0], globalName[1:]
space := " "
join := ";\n"
@@ -5972,9 +5984,18 @@ func (c *linkerContext) generateGlobalNamePrefix() string {
join = ";"
}
+ // Assume the "this" and "import.meta" objects always exist
+ isExistingObject := prefix == "this"
+ if prefix == "import" && len(globalName) > 0 && globalName[0] == "meta" {
+ prefix, globalName = "import.meta", globalName[1:]
+ isExistingObject = true
+ }
+
// Use "||=" to make the code more compact when it's supported
- if len(globalName) > 1 && !c.options.UnsupportedJSFeatures.Has(compat.LogicalAssignment) {
- if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
+ if len(globalName) > 0 && !c.options.UnsupportedJSFeatures.Has(compat.LogicalAssignment) {
+ if isExistingObject {
+ // Keep the prefix as it is
+ } else if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
if c.options.ASCIIOnly {
prefix = string(js_printer.QuoteIdentifier(nil, prefix, c.options.UnsupportedJSFeatures))
}
@@ -5982,7 +6003,7 @@ func (c *linkerContext) generateGlobalNamePrefix() string {
} else {
prefix = fmt.Sprintf("this[%s]", helpers.QuoteForJSON(prefix, c.options.ASCIIOnly))
}
- for _, name := range globalName[1:] {
+ for _, name := range globalName {
var dotOrIndex string
if js_printer.CanEscapeIdentifier(name, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
if c.options.ASCIIOnly {
@@ -5992,12 +6013,19 @@ func (c *linkerContext) generateGlobalNamePrefix() string {
} else {
dotOrIndex = fmt.Sprintf("[%s]", helpers.QuoteForJSON(name, c.options.ASCIIOnly))
}
- prefix = fmt.Sprintf("(%s%s||=%s{})%s", prefix, space, space, dotOrIndex)
+ if isExistingObject {
+ prefix = fmt.Sprintf("%s%s", prefix, dotOrIndex)
+ isExistingObject = false
+ } else {
+ prefix = fmt.Sprintf("(%s%s||=%s{})%s", prefix, space, space, dotOrIndex)
+ }
}
return fmt.Sprintf("%s%s%s=%s", text, prefix, space, space)
}
- if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
+ if isExistingObject {
+ text = fmt.Sprintf("%s%s=%s", prefix, space, space)
+ } else if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
if c.options.ASCIIOnly {
prefix = string(js_printer.QuoteIdentifier(nil, prefix, c.options.UnsupportedJSFeatures))
}
@@ -6007,7 +6035,7 @@ func (c *linkerContext) generateGlobalNamePrefix() string {
text = fmt.Sprintf("%s%s=%s", prefix, space, space)
}
- for _, name := range globalName[1:] {
+ for _, name := range globalName {
oldPrefix := prefix
if js_printer.CanEscapeIdentifier(name, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) {
if c.options.ASCIIOnly {
@@ -6330,6 +6358,16 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
// Ignore empty source map chunks
if compileResult.SourceMapChunk.ShouldIgnore {
prevOffset.AdvanceBytes(compileResult.CSS)
+
+ // Include a null entry in the source map
+ if len(compileResult.CSS) > 0 && c.options.SourceMap != config.SourceMapNone && compileResult.sourceIndex.IsValid() {
+ if n := len(compileResultsForSourceMap); n > 0 && !compileResultsForSourceMap[n-1].isNullEntry {
+ compileResultsForSourceMap = append(compileResultsForSourceMap, compileResultForSourceMap{
+ sourceIndex: compileResult.sourceIndex.GetIndex(),
+ isNullEntry: true,
+ })
+ }
+ }
} else {
prevOffset = sourcemap.LineColumnOffset{}
@@ -6902,6 +6940,7 @@ type compileResultForSourceMap struct {
sourceMapChunk sourcemap.Chunk
generatedOffset sourcemap.LineColumnOffset
sourceIndex uint32
+ isNullEntry bool
}
func (c *linkerContext) generateSourceMapForChunk(
@@ -6918,8 +6957,7 @@ func (c *linkerContext) generateSourceMapForChunk(
// Generate the "sources" and "sourcesContent" arrays
type item struct {
- path logger.Path
- prettyPath string
+ source string
quotedContents []byte
}
items := make([]item, 0, len(results))
@@ -6929,6 +6967,9 @@ func (c *linkerContext) generateSourceMapForChunk(
continue
}
sourceIndexToSourcesIndex[result.sourceIndex] = nextSourcesIndex
+ if result.isNullEntry {
+ continue
+ }
file := &c.graph.Files[result.sourceIndex]
// Simple case: no nested source map
@@ -6937,9 +6978,12 @@ func (c *linkerContext) generateSourceMapForChunk(
if !c.options.ExcludeSourcesContent {
quotedContents = dataForSourceMaps[result.sourceIndex].QuotedContents[0]
}
+ source := file.InputFile.Source.KeyPath.Text
+ if file.InputFile.Source.KeyPath.Namespace == "file" {
+ source = helpers.FileURLFromFilePath(source).String()
+ }
items = append(items, item{
- path: file.InputFile.Source.KeyPath,
- prettyPath: file.InputFile.Source.PrettyPath,
+ source: source,
quotedContents: quotedContents,
})
nextSourcesIndex++
@@ -6949,24 +6993,12 @@ func (c *linkerContext) generateSourceMapForChunk(
// Complex case: nested source map
sm := file.InputFile.InputSourceMap
for i, source := range sm.Sources {
- path := logger.Path{
- Namespace: file.InputFile.Source.KeyPath.Namespace,
- Text: source,
- }
-
- // If this file is in the "file" namespace, change the relative path in
- // the source map into an absolute path using the directory of this file
- if path.Namespace == "file" {
- path.Text = c.fs.Join(c.fs.Dir(file.InputFile.Source.KeyPath.Text), source)
- }
-
var quotedContents []byte
if !c.options.ExcludeSourcesContent {
quotedContents = dataForSourceMaps[result.sourceIndex].QuotedContents[i]
}
items = append(items, item{
- path: path,
- prettyPath: source,
+ source: source,
quotedContents: quotedContents,
})
}
@@ -6982,14 +7014,19 @@ func (c *linkerContext) generateSourceMapForChunk(
// Modify the absolute path to the original file to be relative to the
// directory that will contain the output file for this chunk
- if item.path.Namespace == "file" {
- if relPath, ok := c.fs.Rel(chunkAbsDir, item.path.Text); ok {
+ if sourceURL, err := url.Parse(item.source); err == nil && helpers.IsFileURL(sourceURL) {
+ sourcePath := helpers.FilePathFromFileURL(c.fs, sourceURL)
+ if relPath, ok := c.fs.Rel(chunkAbsDir, sourcePath); ok {
// Make sure to always use forward slashes, even on Windows
- item.prettyPath = strings.ReplaceAll(relPath, "\\", "/")
+ relativeURL := url.URL{Path: strings.ReplaceAll(relPath, "\\", "/")}
+ item.source = relativeURL.String()
+
+ // Replace certain percent encodings for better readability
+ item.source = strings.ReplaceAll(item.source, "%20", " ")
}
}
- j.AddBytes(helpers.QuoteForJSON(item.prettyPath, c.options.ASCIIOnly))
+ j.AddBytes(helpers.QuoteForJSON(item.source, c.options.ASCIIOnly))
}
j.AddString("]")
@@ -7044,28 +7081,38 @@ func (c *linkerContext) generateSourceMapForChunk(
startState.GeneratedColumn += prevColumnOffset
}
- // Append the precomputed source map chunk
- sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer)
+ if result.isNullEntry {
+ // Emit a "null" mapping
+ chunk.Buffer.Data = []byte("A")
+ sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer)
- // Generate the relative offset to start from next time
- prevOriginalName := prevEndState.OriginalName
- prevEndState = chunk.EndState
- prevEndState.SourceIndex += sourcesIndex
- if chunk.Buffer.FirstNameOffset.IsValid() {
- prevEndState.OriginalName += totalQuotedNameLen
+ // Only the generated position was advanced
+ prevEndState.GeneratedLine = startState.GeneratedLine
+ prevEndState.GeneratedColumn = startState.GeneratedColumn
} else {
- // It's possible for a chunk to have mappings but for none of those
- // mappings to have an associated name. The name is optional and is
- // omitted when the mapping is for a non-name token or if the final
- // and original names are the same. In that case we need to restore
- // the previous original name end state since it wasn't modified after
- // all. If we don't do this, then files after this will adjust their
- // name offsets assuming that the previous generated mapping has this
- // file's offset, which is wrong.
- prevEndState.OriginalName = prevOriginalName
- }
- prevColumnOffset = chunk.FinalGeneratedColumn
- totalQuotedNameLen += len(chunk.QuotedNames)
+ // Append the precomputed source map chunk
+ sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer)
+
+ // Generate the relative offset to start from next time
+ prevOriginalName := prevEndState.OriginalName
+ prevEndState = chunk.EndState
+ prevEndState.SourceIndex += sourcesIndex
+ if chunk.Buffer.FirstNameOffset.IsValid() {
+ prevEndState.OriginalName += totalQuotedNameLen
+ } else {
+ // It's possible for a chunk to have mappings but for none of those
+ // mappings to have an associated name. The name is optional and is
+ // omitted when the mapping is for a non-name token or if the final
+ // and original names are the same. In that case we need to restore
+ // the previous original name end state since it wasn't modified after
+ // all. If we don't do this, then files after this will adjust their
+ // name offsets assuming that the previous generated mapping has this
+ // file's offset, which is wrong.
+ prevEndState.OriginalName = prevOriginalName
+ }
+ prevColumnOffset = chunk.FinalGeneratedColumn
+ totalQuotedNameLen += len(chunk.QuotedNames)
+ }
// If this was all one line, include the column offset from the start
if prevEndState.GeneratedLine == 0 {
diff --git a/internal/logger/msg_ids.go b/internal/logger/msg_ids.go
index 2e1e305ca4b..38f337fd71d 100644
--- a/internal/logger/msg_ids.go
+++ b/internal/logger/msg_ids.go
@@ -17,6 +17,7 @@ const (
MsgID_JS_AssignToConstant
MsgID_JS_AssignToDefine
MsgID_JS_AssignToImport
+ MsgID_JS_BigInt
MsgID_JS_CallImportNamespace
MsgID_JS_ClassNameWillThrow
MsgID_JS_CommonJSVariableInESM
@@ -68,8 +69,9 @@ const (
// Source maps
MsgID_SourceMap_InvalidSourceMappings
- MsgID_SourceMap_SectionsInSourceMap
+ MsgID_SourceMap_InvalidSourceURL
MsgID_SourceMap_MissingSourceMap
+ MsgID_SourceMap_SectionsInSourceMap
MsgID_SourceMap_UnsupportedSourceMapComment
// package.json
@@ -108,6 +110,8 @@ func StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel)
overrides[MsgID_JS_AssignToDefine] = logLevel
case "assign-to-import":
overrides[MsgID_JS_AssignToImport] = logLevel
+ case "bigint":
+ overrides[MsgID_JS_BigInt] = logLevel
case "call-import-namespace":
overrides[MsgID_JS_CallImportNamespace] = logLevel
case "class-name-will-throw":
@@ -204,10 +208,12 @@ func StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel)
// Source maps
case "invalid-source-mappings":
overrides[MsgID_SourceMap_InvalidSourceMappings] = logLevel
- case "sections-in-source-map":
- overrides[MsgID_SourceMap_SectionsInSourceMap] = logLevel
+ case "invalid-source-url":
+ overrides[MsgID_SourceMap_InvalidSourceURL] = logLevel
case "missing-source-map":
overrides[MsgID_SourceMap_MissingSourceMap] = logLevel
+ case "sections-in-source-map":
+ overrides[MsgID_SourceMap_SectionsInSourceMap] = logLevel
case "unsupported-source-map-comment":
overrides[MsgID_SourceMap_UnsupportedSourceMapComment] = logLevel
@@ -240,6 +246,8 @@ func MsgIDToString(id MsgID) string {
return "assign-to-define"
case MsgID_JS_AssignToImport:
return "assign-to-import"
+ case MsgID_JS_BigInt:
+ return "bigint"
case MsgID_JS_CallImportNamespace:
return "call-import-namespace"
case MsgID_JS_ClassNameWillThrow:
@@ -336,10 +344,12 @@ func MsgIDToString(id MsgID) string {
// Source maps
case MsgID_SourceMap_InvalidSourceMappings:
return "invalid-source-mappings"
- case MsgID_SourceMap_SectionsInSourceMap:
- return "sections-in-source-map"
+ case MsgID_SourceMap_InvalidSourceURL:
+ return "invalid-source-url"
case MsgID_SourceMap_MissingSourceMap:
return "missing-source-map"
+ case MsgID_SourceMap_SectionsInSourceMap:
+ return "sections-in-source-map"
case MsgID_SourceMap_UnsupportedSourceMapComment:
return "unsupported-source-map-comment"
diff --git a/internal/resolver/package_json.go b/internal/resolver/package_json.go
index acf5a220f60..068acdea0ef 100644
--- a/internal/resolver/package_json.go
+++ b/internal/resolver/package_json.go
@@ -710,7 +710,8 @@ func parseImportsExportsMap(source logger.Source, log logger.Log, json js_ast.Ex
// Track "dead" conditional branches that can never be reached
if foundDefault.Len != 0 || (foundImport.Len != 0 && foundRequire.Len != 0) {
deadCondition.ranges = append(deadCondition.ranges, keyRange)
- if deadCondition.reason == "" {
+ // Note: Don't warn about the "default" condition as it's supposed to be a catch-all condition
+ if deadCondition.reason == "" && key != "default" {
if foundDefault.Len != 0 {
deadCondition.reason = "\"default\""
deadCondition.notes = []logger.MsgData{
diff --git a/internal/resolver/resolver.go b/internal/resolver/resolver.go
index ccc21309caf..5522bb1d596 100644
--- a/internal/resolver/resolver.go
+++ b/internal/resolver/resolver.go
@@ -243,7 +243,7 @@ func NewResolver(call config.APICall, fs fs.FS, log logger.Log, caches *cache.Ca
// Filter out non-CSS extensions for CSS "@import" imports
cssExtensionOrder := make([]string, 0, len(options.ExtensionOrder))
for _, ext := range options.ExtensionOrder {
- if loader, ok := options.ExtensionToLoader[ext]; !ok || loader.IsCSS() {
+ if loader := config.LoaderFromFileExtension(options.ExtensionToLoader, ext); loader == config.LoaderNone || loader.IsCSS() {
cssExtensionOrder = append(cssExtensionOrder, ext)
}
}
@@ -257,23 +257,23 @@ func NewResolver(call config.APICall, fs fs.FS, log logger.Log, caches *cache.Ca
nodeModulesExtensionOrder := make([]string, 0, len(options.ExtensionOrder))
split := 0
for i, ext := range options.ExtensionOrder {
- if loader, ok := options.ExtensionToLoader[ext]; ok && loader == config.LoaderJS || loader == config.LoaderJSX {
+ if loader := config.LoaderFromFileExtension(options.ExtensionToLoader, ext); loader == config.LoaderJS || loader == config.LoaderJSX {
split = i + 1 // Split after the last JavaScript extension
}
}
if split != 0 { // Only do this if there are any JavaScript extensions
for _, ext := range options.ExtensionOrder[:split] { // Non-TypeScript extensions before the split
- if loader, ok := options.ExtensionToLoader[ext]; !ok || !loader.IsTypeScript() {
+ if loader := config.LoaderFromFileExtension(options.ExtensionToLoader, ext); !loader.IsTypeScript() {
nodeModulesExtensionOrder = append(nodeModulesExtensionOrder, ext)
}
}
for _, ext := range options.ExtensionOrder { // All TypeScript extensions
- if loader, ok := options.ExtensionToLoader[ext]; ok && loader.IsTypeScript() {
+ if loader := config.LoaderFromFileExtension(options.ExtensionToLoader, ext); loader.IsTypeScript() {
nodeModulesExtensionOrder = append(nodeModulesExtensionOrder, ext)
}
}
for _, ext := range options.ExtensionOrder[split:] { // Non-TypeScript extensions after the split
- if loader, ok := options.ExtensionToLoader[ext]; !ok || !loader.IsTypeScript() {
+ if loader := config.LoaderFromFileExtension(options.ExtensionToLoader, ext); !loader.IsTypeScript() {
nodeModulesExtensionOrder = append(nodeModulesExtensionOrder, ext)
}
}
@@ -1132,12 +1132,19 @@ func (r resolverQuery) dirInfoCached(path string) *dirInfo {
// Cache hit: stop now
if !ok {
+ // Update the cache to indicate failure. Even if the read failed, we don't
+ // want to retry again later. The directory is inaccessible so trying again
+ // is wasted. Doing this before calling "dirInfoUncached" prevents stack
+ // overflow in case this directory is recursively encountered again.
+ r.dirCache[path] = nil
+
// Cache miss: read the info
cached = r.dirInfoUncached(path)
- // Update the cache unconditionally. Even if the read failed, we don't want to
- // retry again later. The directory is inaccessible so trying again is wasted.
- r.dirCache[path] = cached
+ // Only update the cache again on success
+ if cached != nil {
+ r.dirCache[path] = cached
+ }
}
if r.debugLogs != nil {
@@ -1176,11 +1183,6 @@ func (r resolverQuery) parseTSConfig(file string, visited map[string]bool, confi
if visited[file] {
return nil, errParseErrorImportCycle
}
- if visited != nil {
- // This is only non-nil for "build" API calls. This is nil for "transform"
- // API calls, which tells us to not process "extends" fields.
- visited[file] = true
- }
contents, err, originalError := r.caches.FSCache.ReadFile(r.fs, file)
if r.debugLogs != nil && originalError != nil {
@@ -1199,7 +1201,20 @@ func (r resolverQuery) parseTSConfig(file string, visited map[string]bool, confi
PrettyPath: PrettyPath(r.fs, keyPath),
Contents: contents,
}
- return r.parseTSConfigFromSource(source, visited, configDir)
+ if visited != nil {
+ // This is only non-nil for "build" API calls. This is nil for "transform"
+ // API calls, which tells us to not process "extends" fields.
+ visited[file] = true
+ }
+ result, err := r.parseTSConfigFromSource(source, visited, configDir)
+ if visited != nil {
+		// Reset this back to false in case something uses TypeScript 5.0's multiple
+ // inheritance feature for "tsconfig.json" files. It should be valid to visit
+ // the same base "tsconfig.json" file multiple times from different multiple
+ // inheritance subtrees.
+ visited[file] = false
+ }
+ return result, err
}
func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map[string]bool, configDir string) (*TSConfigJSON, error) {
@@ -1294,7 +1309,8 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map
if entry, _ := entries.Get("package.json"); entry != nil && entry.Kind(r.fs) == fs.FileEntry {
// Check the "exports" map
if packageJSON := r.parsePackageJSON(result.pkgDirPath); packageJSON != nil && packageJSON.exportsMap != nil {
- if absolute, ok, _ := r.esmResolveAlgorithm(result.pkgIdent, "."+result.pkgSubpath, packageJSON, result.pkgDirPath, source.KeyPath.Text); ok {
+ if absolute, ok, _ := r.esmResolveAlgorithm(finalizeImportsExportsYarnPnPTSConfigExtends,
+ result.pkgIdent, "."+result.pkgSubpath, packageJSON, result.pkgDirPath, source.KeyPath.Text); ok {
base, err := r.parseTSConfig(absolute.Primary.Text, visited, configDir)
if result, shouldReturn := maybeFinishOurSearch(base, err, absolute.Primary.Text); shouldReturn {
return result
@@ -2236,6 +2252,7 @@ func (r resolverQuery) loadPackageImports(importPath string, dirInfoPackageJSON
}
absolute, ok, diffCase := r.finalizeImportsExportsResult(
+ finalizeImportsExportsNormal,
dirInfoPackageJSON.absPath, conditions, *packageJSON.importsMap, packageJSON,
resolvedPath, status, debug,
"", "", "",
@@ -2243,7 +2260,14 @@ func (r resolverQuery) loadPackageImports(importPath string, dirInfoPackageJSON
return absolute, ok, diffCase, nil
}
-func (r resolverQuery) esmResolveAlgorithm(esmPackageName string, esmPackageSubpath string, packageJSON *packageJSON, absPkgPath string, absPath string) (PathPair, bool, *fs.DifferentCase) {
+func (r resolverQuery) esmResolveAlgorithm(
+ kind finalizeImportsExportsKind,
+ esmPackageName string,
+ esmPackageSubpath string,
+ packageJSON *packageJSON,
+ absPkgPath string,
+ absPath string,
+) (PathPair, bool, *fs.DifferentCase) {
if r.debugLogs != nil {
r.debugLogs.addNote(fmt.Sprintf("Looking for %q in \"exports\" map in %q", esmPackageSubpath, packageJSON.source.KeyPath.Text))
r.debugLogs.increaseIndent()
@@ -2278,6 +2302,7 @@ func (r resolverQuery) esmResolveAlgorithm(esmPackageName string, esmPackageSubp
resolvedPath, status, debug = r.esmHandlePostConditions(resolvedPath, status, debug)
return r.finalizeImportsExportsResult(
+ kind,
absPkgPath, conditions, *packageJSON.exportsMap, packageJSON,
resolvedPath, status, debug,
esmPackageName, esmPackageSubpath, absPath,
@@ -2358,7 +2383,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb
if pkgDirInfo := r.dirInfoCached(result.pkgDirPath); pkgDirInfo != nil {
// Check the "exports" map
if packageJSON := pkgDirInfo.packageJSON; packageJSON != nil && packageJSON.exportsMap != nil {
- absolute, ok, diffCase := r.esmResolveAlgorithm(result.pkgIdent, "."+result.pkgSubpath, packageJSON, pkgDirInfo.absPath, absPath)
+ absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, result.pkgIdent, "."+result.pkgSubpath, packageJSON, pkgDirInfo.absPath, absPath)
return absolute, ok, diffCase, nil
}
@@ -2393,7 +2418,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb
// Check for self-references
if dirInfoPackageJSON != nil {
if packageJSON := dirInfoPackageJSON.packageJSON; packageJSON.name == esmPackageName && packageJSON.exportsMap != nil {
- absolute, ok, diffCase := r.esmResolveAlgorithm(esmPackageName, esmPackageSubpath, packageJSON,
+ absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, esmPackageName, esmPackageSubpath, packageJSON,
dirInfoPackageJSON.absPath, r.fs.Join(dirInfoPackageJSON.absPath, esmPackageSubpath))
return absolute, ok, diffCase, nil
}
@@ -2412,7 +2437,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb
if pkgDirInfo := r.dirInfoCached(absPkgPath); pkgDirInfo != nil {
// Check the "exports" map
if packageJSON := pkgDirInfo.packageJSON; packageJSON != nil && packageJSON.exportsMap != nil {
- absolute, ok, diffCase := r.esmResolveAlgorithm(esmPackageName, esmPackageSubpath, packageJSON, absPkgPath, absPath)
+ absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, esmPackageName, esmPackageSubpath, packageJSON, absPkgPath, absPath)
return absolute, ok, diffCase, nil, true
}
@@ -2524,7 +2549,15 @@ func (r resolverQuery) checkForBuiltInNodeModules(importPath string) (PathPair,
return PathPair{}, false, nil
}
+type finalizeImportsExportsKind uint8
+
+const (
+ finalizeImportsExportsNormal finalizeImportsExportsKind = iota
+ finalizeImportsExportsYarnPnPTSConfigExtends
+)
+
func (r resolverQuery) finalizeImportsExportsResult(
+ kind finalizeImportsExportsKind,
absDirPath string,
conditions map[string]bool,
importExportMap pjMap,
@@ -2551,6 +2584,14 @@ func (r resolverQuery) finalizeImportsExportsResult(
r.debugLogs.addNote(fmt.Sprintf("The resolved path %q is exact", absResolvedPath))
}
+ // Avoid calling "dirInfoCached" recursively for "tsconfig.json" extends with Yarn PnP
+ if kind == finalizeImportsExportsYarnPnPTSConfigExtends {
+ if r.debugLogs != nil {
+ r.debugLogs.addNote(fmt.Sprintf("Resolved to %q", absResolvedPath))
+ }
+ return PathPair{Primary: logger.Path{Text: absResolvedPath, Namespace: "file"}}, true, nil
+ }
+
resolvedDirInfo := r.dirInfoCached(r.fs.Dir(absResolvedPath))
base := r.fs.Base(absResolvedPath)
extensionOrder := r.options.ExtensionOrder
diff --git a/internal/resolver/tsconfig_json.go b/internal/resolver/tsconfig_json.go
index edfc7755cee..1f9063e58a9 100644
--- a/internal/resolver/tsconfig_json.go
+++ b/internal/resolver/tsconfig_json.go
@@ -221,7 +221,7 @@ func ParseTSConfigJSON(
switch lowerValue {
case "es3", "es5", "es6", "es2015", "es2016", "es2017", "es2018", "es2019", "es2020", "es2021":
result.Settings.Target = config.TSTargetBelowES2022
- case "es2022", "es2023", "esnext":
+ case "es2022", "es2023", "es2024", "esnext":
result.Settings.Target = config.TSTargetAtOrAboveES2022
default:
ok = false
diff --git a/internal/sourcemap/sourcemap.go b/internal/sourcemap/sourcemap.go
index 93effc2102e..4d759dbbaec 100644
--- a/internal/sourcemap/sourcemap.go
+++ b/internal/sourcemap/sourcemap.go
@@ -2,6 +2,7 @@ package sourcemap
import (
"bytes"
+ "strings"
"unicode/utf8"
"github.com/evanw/esbuild/internal/ast"
@@ -315,14 +316,14 @@ func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte {
potentialStartOfRun := current
- // Skip over the original position information
- _, current = DecodeVLQ(pieces.Mappings, current) // The original source
- _, current = DecodeVLQ(pieces.Mappings, current) // The original line
- _, current = DecodeVLQ(pieces.Mappings, current) // The original column
-
- // Skip over the original name
+ // Skip over the original position information if present
if current < len(pieces.Mappings) {
- if c := pieces.Mappings[current]; c != ',' && c != ';' {
+ _, current = DecodeVLQ(pieces.Mappings, current) // The original source
+ _, current = DecodeVLQ(pieces.Mappings, current) // The original line
+ _, current = DecodeVLQ(pieces.Mappings, current) // The original column
+
+ // Skip over the original name if present
+ if current < len(pieces.Mappings) {
_, current = DecodeVLQ(pieces.Mappings, current)
}
}
@@ -426,20 +427,28 @@ func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startS
// case below instead. Original names are optional and are often omitted, so
// we handle it uniformly by saving an index to the first original name,
// which may or may not be a part of the first mapping.
+ var sourceIndex int
+ var originalLine int
+ var originalColumn int
+ omitSource := false
generatedColumn, i := DecodeVLQ(buffer.Data, semicolons)
- sourceIndex, i := DecodeVLQ(buffer.Data, i)
- originalLine, i := DecodeVLQ(buffer.Data, i)
- originalColumn, i := DecodeVLQ(buffer.Data, i)
+ if i == len(buffer.Data) || strings.IndexByte(",;", buffer.Data[i]) != -1 {
+ omitSource = true
+ } else {
+ sourceIndex, i = DecodeVLQ(buffer.Data, i)
+ originalLine, i = DecodeVLQ(buffer.Data, i)
+ originalColumn, i = DecodeVLQ(buffer.Data, i)
+ }
// Rewrite the first mapping to be relative to the end state of the previous
// chunk. We now know what the end state is because we're in the second pass
// where all chunks have already been generated.
- startState.SourceIndex += sourceIndex
startState.GeneratedColumn += generatedColumn
+ startState.SourceIndex += sourceIndex
startState.OriginalLine += originalLine
startState.OriginalColumn += originalColumn
prevEndState.HasOriginalName = false // This is handled separately below
- rewritten, _ := appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState)
+ rewritten, _ := appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState, omitSource)
j.AddBytes(rewritten)
// Next, if there's an original name, we need to rewrite that as well to be
@@ -458,7 +467,9 @@ func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startS
j.AddBytes(buffer.Data[i:])
}
-func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) ([]byte, ast.Index32) {
+func appendMappingToBuffer(
+ buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState, omitSource bool,
+) ([]byte, ast.Index32) {
// Put commas in between mappings
if lastByte != 0 && lastByte != ';' && lastByte != '"' {
buffer = append(buffer, ',')
@@ -466,9 +477,11 @@ func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapStat
// Record the mapping (note that the generated line is recorded using ';' elsewhere)
buffer = encodeVLQ(buffer, currentState.GeneratedColumn-prevState.GeneratedColumn)
- buffer = encodeVLQ(buffer, currentState.SourceIndex-prevState.SourceIndex)
- buffer = encodeVLQ(buffer, currentState.OriginalLine-prevState.OriginalLine)
- buffer = encodeVLQ(buffer, currentState.OriginalColumn-prevState.OriginalColumn)
+ if !omitSource {
+ buffer = encodeVLQ(buffer, currentState.SourceIndex-prevState.SourceIndex)
+ buffer = encodeVLQ(buffer, currentState.OriginalLine-prevState.OriginalLine)
+ buffer = encodeVLQ(buffer, currentState.OriginalColumn-prevState.OriginalColumn)
+ }
// Record the optional original name
var nameOffset ast.Index32
@@ -820,7 +833,7 @@ func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState
}
var nameOffset ast.Index32
- b.sourceMap, nameOffset = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState)
+ b.sourceMap, nameOffset = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState, false)
prevOriginalName := b.prevState.OriginalName
b.prevState = currentState
if !currentState.HasOriginalName {
diff --git a/lib/deno/mod.ts b/lib/deno/mod.ts
index b6233929362..eea9a081111 100644
--- a/lib/deno/mod.ts
+++ b/lib/deno/mod.ts
@@ -203,11 +203,12 @@ const spawnNew: SpawnFn = (cmd, { args, stdin, stdout, stderr }) => {
stdout,
stderr,
}).spawn()
- const writer = child.stdin.getWriter()
- const reader = child.stdout.getReader()
+ // Note: Need to check for "piped" in Deno ≥1.31.0 to avoid a crash
+ const writer = stdin === 'piped' ? child.stdin.getWriter() : null
+ const reader = stdout === 'piped' ? child.stdout.getReader() : null
return {
- write: bytes => writer.write(bytes),
- read: () => reader.read().then(x => x.value || null),
+ write: writer ? bytes => writer.write(bytes) : () => Promise.resolve(),
+ read: reader ? () => reader.read().then(x => x.value || null) : () => Promise.resolve(null),
close: async () => {
// We can't call "kill()" because it doesn't seem to work. Tests will
// still fail with "A child process was opened during the test, but not
@@ -223,8 +224,8 @@ const spawnNew: SpawnFn = (cmd, { args, stdin, stdout, stderr }) => {
// we can do.
//
// See this for more info: https://github.com/evanw/esbuild/pull/3611
- await writer.close()
- await reader.cancel()
+ if (writer) await writer.close()
+ if (reader) await reader.cancel()
// Wait for the process to exit. The new "kill()" API doesn't flag the
// process as having exited because processes can technically ignore the
diff --git a/lib/npm/browser.ts b/lib/npm/browser.ts
index 325687c8dd3..fcf4acd276d 100644
--- a/lib/npm/browser.ts
+++ b/lib/npm/browser.ts
@@ -88,6 +88,9 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
terminate: () => void
}
+ let rejectAllWith: (error: unknown) => void
+ const rejectAllPromise = new Promise(resolve => rejectAllWith = resolve)
+
if (useWorker) {
// Run esbuild off the main thread
let blob = new Blob([`onmessage=${WEB_WORKER_SOURCE_CODE}(postMessage)`], { type: 'text/javascript' })
@@ -98,7 +101,13 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
let go: Go | undefined
worker = {
onmessage: null,
- postMessage: data => setTimeout(() => go = onmessage({ data })),
+ postMessage: data => setTimeout(() => {
+ try {
+ go = onmessage({ data })
+ } catch (error) {
+ rejectAllWith(error) // Catch strange crashes (e.g. stack overflow)
+ }
+ }),
terminate() {
if (go)
for (let timeout of go._scheduledTimeouts.values())
@@ -144,7 +153,8 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
longLivedService = {
build: (options: types.BuildOptions) =>
- new Promise((resolve, reject) =>
+ new Promise((resolve, reject) => {
+ rejectAllPromise.then(reject)
service.buildOrContext({
callName: 'build',
refs: null,
@@ -152,10 +162,12 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
isTTY: false,
defaultWD: '/',
callback: (err, res) => err ? reject(err) : resolve(res as types.BuildResult),
- })),
+ })
+ }),
context: (options: types.BuildOptions) =>
- new Promise((resolve, reject) =>
+ new Promise((resolve, reject) => {
+ rejectAllPromise.then(reject)
service.buildOrContext({
callName: 'context',
refs: null,
@@ -163,10 +175,12 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
isTTY: false,
defaultWD: '/',
callback: (err, res) => err ? reject(err) : resolve(res as types.BuildContext),
- })),
+ })
+ }),
transform: (input: string | Uint8Array, options?: types.TransformOptions) =>
- new Promise((resolve, reject) =>
+ new Promise((resolve, reject) => {
+ rejectAllPromise.then(reject)
service.transform({
callName: 'transform',
refs: null,
@@ -178,27 +192,32 @@ const startRunningService = async (wasmURL: string | URL, wasmModule: WebAssembl
writeFile(_, callback) { callback(null); },
},
callback: (err, res) => err ? reject(err) : resolve(res!),
- })),
+ })
+ }),
formatMessages: (messages, options) =>
- new Promise((resolve, reject) =>
+ new Promise((resolve, reject) => {
+ rejectAllPromise.then(reject)
service.formatMessages({
callName: 'formatMessages',
refs: null,
messages,
options,
callback: (err, res) => err ? reject(err) : resolve(res!),
- })),
+ })
+ }),
analyzeMetafile: (metafile, options) =>
- new Promise((resolve, reject) =>
+ new Promise((resolve, reject) => {
+ rejectAllPromise.then(reject)
service.analyzeMetafile({
callName: 'analyzeMetafile',
refs: null,
metafile: typeof metafile === 'string' ? metafile : JSON.stringify(metafile),
options,
callback: (err, res) => err ? reject(err) : resolve(res!),
- })),
+ })
+ }),
}
}
diff --git a/lib/npm/node-platform.ts b/lib/npm/node-platform.ts
index bda3b0948cc..559c5409712 100644
--- a/lib/npm/node-platform.ts
+++ b/lib/npm/node-platform.ts
@@ -41,6 +41,7 @@ export const knownUnixlikePackages: Record = {
'linux s390x BE': '@esbuild/linux-s390x',
'linux x64 LE': '@esbuild/linux-x64',
'linux loong64 LE': '@esbuild/linux-loong64',
+ 'netbsd arm64 LE': '@esbuild/netbsd-arm64',
'netbsd x64 LE': '@esbuild/netbsd-x64',
'openbsd arm64 LE': '@esbuild/openbsd-arm64',
'openbsd x64 LE': '@esbuild/openbsd-x64',
diff --git a/lib/shared/common.ts b/lib/shared/common.ts
index c323016ba55..ac7a2b2d189 100644
--- a/lib/shared/common.ts
+++ b/lib/shared/common.ts
@@ -28,6 +28,9 @@ let mustBeRegExp = (value: RegExp | undefined): string | null =>
let mustBeInteger = (value: number | undefined): string | null =>
typeof value === 'number' && value === (value | 0) ? null : 'an integer'
+let mustBeValidPortNumber = (value: number | undefined): string | null =>
+ typeof value === 'number' && value === (value | 0) && value >= 0 && value <= 0xFFFF ? null : 'a valid port number'
+
let mustBeFunction = (value: Function | undefined): string | null =>
typeof value === 'function' ? null : 'a function'
@@ -1091,7 +1094,7 @@ function buildOrContextImpl(
serve: (options = {}) => new Promise((resolve, reject) => {
if (!streamIn.hasFS) throw new Error(`Cannot use the "serve" API in this environment`)
const keys: OptionKeys = {}
- const port = getFlag(options, keys, 'port', mustBeInteger)
+ const port = getFlag(options, keys, 'port', mustBeValidPortNumber)
const host = getFlag(options, keys, 'host', mustBeString)
const servedir = getFlag(options, keys, 'servedir', mustBeString)
const keyfile = getFlag(options, keys, 'keyfile', mustBeString)
diff --git a/lib/shared/stdio_protocol.ts b/lib/shared/stdio_protocol.ts
index 2b14630fa42..5f5749c6dbc 100644
--- a/lib/shared/stdio_protocol.ts
+++ b/lib/shared/stdio_protocol.ts
@@ -35,7 +35,7 @@ export interface ServeRequest {
export interface ServeResponse {
port: number
- host: string
+ hosts: string[]
}
export interface BuildPlugin {
diff --git a/lib/shared/types.ts b/lib/shared/types.ts
index c7053070fe0..d0ae5104bda 100644
--- a/lib/shared/types.ts
+++ b/lib/shared/types.ts
@@ -256,7 +256,7 @@ export interface ServeOnRequestArgs {
/** Documentation: https://esbuild.github.io/api/#serve-return-values */
export interface ServeResult {
port: number
- host: string
+ hosts: string[]
}
export interface TransformOptions extends CommonOptions {
diff --git a/npm/@esbuild/aix-ppc64/package.json b/npm/@esbuild/aix-ppc64/package.json
index f354cb40a0d..def2b6628f7 100644
--- a/npm/@esbuild/aix-ppc64/package.json
+++ b/npm/@esbuild/aix-ppc64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/aix-ppc64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The IBM AIX PowerPC 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/android-arm/package.json b/npm/@esbuild/android-arm/package.json
index e625f4bce0b..2790f3b32dc 100644
--- a/npm/@esbuild/android-arm/package.json
+++ b/npm/@esbuild/android-arm/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/android-arm",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "A WebAssembly shim for esbuild on Android ARM.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/android-arm64/package.json b/npm/@esbuild/android-arm64/package.json
index 7ff02423c0a..9fa7d4e3205 100644
--- a/npm/@esbuild/android-arm64/package.json
+++ b/npm/@esbuild/android-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/android-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Android ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/android-x64/package.json b/npm/@esbuild/android-x64/package.json
index 93913bfc08b..69623b5f762 100644
--- a/npm/@esbuild/android-x64/package.json
+++ b/npm/@esbuild/android-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/android-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "A WebAssembly shim for esbuild on Android x64.",
"repository": "https://github.com/evanw/esbuild",
"license": "MIT",
diff --git a/npm/@esbuild/darwin-arm64/package.json b/npm/@esbuild/darwin-arm64/package.json
index 8ad9d31c73f..99aceae65e9 100644
--- a/npm/@esbuild/darwin-arm64/package.json
+++ b/npm/@esbuild/darwin-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/darwin-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The macOS ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/darwin-x64/package.json b/npm/@esbuild/darwin-x64/package.json
index bde3c0c970d..677ff5bd270 100644
--- a/npm/@esbuild/darwin-x64/package.json
+++ b/npm/@esbuild/darwin-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/darwin-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The macOS 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/freebsd-arm64/package.json b/npm/@esbuild/freebsd-arm64/package.json
index 4da08858d96..69fb4c5fd03 100644
--- a/npm/@esbuild/freebsd-arm64/package.json
+++ b/npm/@esbuild/freebsd-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/freebsd-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The FreeBSD ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/freebsd-x64/package.json b/npm/@esbuild/freebsd-x64/package.json
index 96af5989ba9..b5f301333c3 100644
--- a/npm/@esbuild/freebsd-x64/package.json
+++ b/npm/@esbuild/freebsd-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/freebsd-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The FreeBSD 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-arm/package.json b/npm/@esbuild/linux-arm/package.json
index af9c3d76913..49ec1b5f69f 100644
--- a/npm/@esbuild/linux-arm/package.json
+++ b/npm/@esbuild/linux-arm/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-arm",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux ARM binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-arm64/package.json b/npm/@esbuild/linux-arm64/package.json
index c2abee65854..33bcdd7e804 100644
--- a/npm/@esbuild/linux-arm64/package.json
+++ b/npm/@esbuild/linux-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-ia32/package.json b/npm/@esbuild/linux-ia32/package.json
index 72bd2d6b337..b906f469160 100644
--- a/npm/@esbuild/linux-ia32/package.json
+++ b/npm/@esbuild/linux-ia32/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-ia32",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux 32-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-loong64/package.json b/npm/@esbuild/linux-loong64/package.json
index bc125b49044..a1fdc539603 100644
--- a/npm/@esbuild/linux-loong64/package.json
+++ b/npm/@esbuild/linux-loong64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-loong64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux LoongArch 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-mips64el/package.json b/npm/@esbuild/linux-mips64el/package.json
index 467fea55eea..69f445a8d01 100644
--- a/npm/@esbuild/linux-mips64el/package.json
+++ b/npm/@esbuild/linux-mips64el/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-mips64el",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux MIPS 64-bit Little Endian binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-ppc64/package.json b/npm/@esbuild/linux-ppc64/package.json
index 33adcba6b29..082c7353e36 100644
--- a/npm/@esbuild/linux-ppc64/package.json
+++ b/npm/@esbuild/linux-ppc64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-ppc64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux PowerPC 64-bit Little Endian binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-riscv64/package.json b/npm/@esbuild/linux-riscv64/package.json
index e400a66d1ed..4169aa75965 100644
--- a/npm/@esbuild/linux-riscv64/package.json
+++ b/npm/@esbuild/linux-riscv64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-riscv64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux RISC-V 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-s390x/package.json b/npm/@esbuild/linux-s390x/package.json
index 3ca1227b56e..c72b4835e49 100644
--- a/npm/@esbuild/linux-s390x/package.json
+++ b/npm/@esbuild/linux-s390x/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-s390x",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux IBM Z 64-bit Big Endian binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/linux-x64/package.json b/npm/@esbuild/linux-x64/package.json
index 66cd896352e..cd213634875 100644
--- a/npm/@esbuild/linux-x64/package.json
+++ b/npm/@esbuild/linux-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/linux-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Linux 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/netbsd-arm64/README.md b/npm/@esbuild/netbsd-arm64/README.md
new file mode 100644
index 00000000000..ddbd635a15c
--- /dev/null
+++ b/npm/@esbuild/netbsd-arm64/README.md
@@ -0,0 +1,5 @@
+# esbuild
+
+This is the NetBSD ARM 64-bit binary for esbuild, a JavaScript bundler and minifier. See https://github.com/evanw/esbuild for details.
+
+⚠️ Note: NetBSD is not one of [Node's supported platforms](https://nodejs.org/api/process.html#process_process_platform), so installing esbuild may or may not work on NetBSD depending on how Node has been patched. This is not a problem with esbuild. ⚠️
diff --git a/npm/@esbuild/netbsd-arm64/package.json b/npm/@esbuild/netbsd-arm64/package.json
new file mode 100644
index 00000000000..d2e261b58db
--- /dev/null
+++ b/npm/@esbuild/netbsd-arm64/package.json
@@ -0,0 +1,20 @@
+{
+ "name": "@esbuild/netbsd-arm64",
+ "version": "0.25.0",
+ "description": "The NetBSD ARM 64-bit binary for esbuild, a JavaScript bundler.",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/evanw/esbuild.git"
+ },
+ "license": "MIT",
+ "preferUnplugged": true,
+ "engines": {
+ "node": ">=18"
+ },
+ "os": [
+ "netbsd"
+ ],
+ "cpu": [
+ "arm64"
+ ]
+}
diff --git a/npm/@esbuild/netbsd-x64/package.json b/npm/@esbuild/netbsd-x64/package.json
index a4443237003..f02bc766eda 100644
--- a/npm/@esbuild/netbsd-x64/package.json
+++ b/npm/@esbuild/netbsd-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/netbsd-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The NetBSD AMD64 binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/openbsd-arm64/package.json b/npm/@esbuild/openbsd-arm64/package.json
index 752a7aef05f..4debe24ac5f 100644
--- a/npm/@esbuild/openbsd-arm64/package.json
+++ b/npm/@esbuild/openbsd-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/openbsd-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The OpenBSD ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/openbsd-x64/package.json b/npm/@esbuild/openbsd-x64/package.json
index 214e7145131..f60f27e7c4e 100644
--- a/npm/@esbuild/openbsd-x64/package.json
+++ b/npm/@esbuild/openbsd-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/openbsd-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The OpenBSD 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/sunos-x64/package.json b/npm/@esbuild/sunos-x64/package.json
index f807782b471..5b1617b7113 100644
--- a/npm/@esbuild/sunos-x64/package.json
+++ b/npm/@esbuild/sunos-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/sunos-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The illumos 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/wasi-preview1/package.json b/npm/@esbuild/wasi-preview1/package.json
index 9a4132a9af5..32bd623e1d4 100644
--- a/npm/@esbuild/wasi-preview1/package.json
+++ b/npm/@esbuild/wasi-preview1/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/wasi-preview1",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The WASI (WebAssembly System Interface) preview 1 binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/win32-arm64/package.json b/npm/@esbuild/win32-arm64/package.json
index 1098c27efcf..264b842a635 100644
--- a/npm/@esbuild/win32-arm64/package.json
+++ b/npm/@esbuild/win32-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/win32-arm64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Windows ARM 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/win32-ia32/package.json b/npm/@esbuild/win32-ia32/package.json
index f76ca4dfffe..0953e2a6195 100644
--- a/npm/@esbuild/win32-ia32/package.json
+++ b/npm/@esbuild/win32-ia32/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/win32-ia32",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Windows 32-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/@esbuild/win32-x64/package.json b/npm/@esbuild/win32-x64/package.json
index a459f57bc6c..c52efe21f99 100644
--- a/npm/@esbuild/win32-x64/package.json
+++ b/npm/@esbuild/win32-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@esbuild/win32-x64",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The Windows 64-bit binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/esbuild-wasm/package.json b/npm/esbuild-wasm/package.json
index 8eecd0190bb..9cf63165e43 100644
--- a/npm/esbuild-wasm/package.json
+++ b/npm/esbuild-wasm/package.json
@@ -1,6 +1,6 @@
{
"name": "esbuild-wasm",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "The cross-platform WebAssembly binary for esbuild, a JavaScript bundler.",
"repository": {
"type": "git",
diff --git a/npm/esbuild/package.json b/npm/esbuild/package.json
index aaef74bf095..a36abce1920 100644
--- a/npm/esbuild/package.json
+++ b/npm/esbuild/package.json
@@ -1,6 +1,6 @@
{
"name": "esbuild",
- "version": "0.23.1",
+ "version": "0.25.0",
"description": "An extremely fast JavaScript and CSS bundler and minifier.",
"repository": {
"type": "git",
@@ -18,30 +18,31 @@
"esbuild": "bin/esbuild"
},
"optionalDependencies": {
- "@esbuild/aix-ppc64": "0.23.1",
- "@esbuild/android-arm": "0.23.1",
- "@esbuild/android-arm64": "0.23.1",
- "@esbuild/android-x64": "0.23.1",
- "@esbuild/darwin-arm64": "0.23.1",
- "@esbuild/darwin-x64": "0.23.1",
- "@esbuild/freebsd-arm64": "0.23.1",
- "@esbuild/freebsd-x64": "0.23.1",
- "@esbuild/linux-arm": "0.23.1",
- "@esbuild/linux-arm64": "0.23.1",
- "@esbuild/linux-ia32": "0.23.1",
- "@esbuild/linux-loong64": "0.23.1",
- "@esbuild/linux-mips64el": "0.23.1",
- "@esbuild/linux-ppc64": "0.23.1",
- "@esbuild/linux-riscv64": "0.23.1",
- "@esbuild/linux-s390x": "0.23.1",
- "@esbuild/linux-x64": "0.23.1",
- "@esbuild/netbsd-x64": "0.23.1",
- "@esbuild/openbsd-arm64": "0.23.1",
- "@esbuild/openbsd-x64": "0.23.1",
- "@esbuild/sunos-x64": "0.23.1",
- "@esbuild/win32-arm64": "0.23.1",
- "@esbuild/win32-ia32": "0.23.1",
- "@esbuild/win32-x64": "0.23.1"
+ "@esbuild/aix-ppc64": "0.25.0",
+ "@esbuild/android-arm": "0.25.0",
+ "@esbuild/android-arm64": "0.25.0",
+ "@esbuild/android-x64": "0.25.0",
+ "@esbuild/darwin-arm64": "0.25.0",
+ "@esbuild/darwin-x64": "0.25.0",
+ "@esbuild/freebsd-arm64": "0.25.0",
+ "@esbuild/freebsd-x64": "0.25.0",
+ "@esbuild/linux-arm": "0.25.0",
+ "@esbuild/linux-arm64": "0.25.0",
+ "@esbuild/linux-ia32": "0.25.0",
+ "@esbuild/linux-loong64": "0.25.0",
+ "@esbuild/linux-mips64el": "0.25.0",
+ "@esbuild/linux-ppc64": "0.25.0",
+ "@esbuild/linux-riscv64": "0.25.0",
+ "@esbuild/linux-s390x": "0.25.0",
+ "@esbuild/linux-x64": "0.25.0",
+ "@esbuild/netbsd-arm64": "0.25.0",
+ "@esbuild/netbsd-x64": "0.25.0",
+ "@esbuild/openbsd-arm64": "0.25.0",
+ "@esbuild/openbsd-x64": "0.25.0",
+ "@esbuild/sunos-x64": "0.25.0",
+ "@esbuild/win32-arm64": "0.25.0",
+ "@esbuild/win32-ia32": "0.25.0",
+ "@esbuild/win32-x64": "0.25.0"
},
"license": "MIT"
}
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 08a597ec2a0..6f426b67e87 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -472,7 +472,7 @@ func Transform(input string, options TransformOptions) TransformResult {
// Documentation: https://esbuild.github.io/api/#serve-arguments
type ServeOptions struct {
- Port uint16
+ Port int
Host string
Servedir string
Keyfile string
@@ -491,8 +491,8 @@ type ServeOnRequestArgs struct {
// Documentation: https://esbuild.github.io/api/#serve-return-values
type ServeResult struct {
- Port uint16
- Host string
+ Port uint16
+ Hosts []string
}
type WatchOptions struct {
diff --git a/pkg/api/api_impl.go b/pkg/api/api_impl.go
index 7c737779e0f..0b62b95f418 100644
--- a/pkg/api/api_impl.go
+++ b/pkg/api/api_impl.go
@@ -386,11 +386,11 @@ func validateSupported(log logger.Log, supported map[string]bool) (
return
}
-func validateGlobalName(log logger.Log, text string) []string {
+func validateGlobalName(log logger.Log, text string, path string) []string {
if text != "" {
source := logger.Source{
- KeyPath: logger.Path{Text: "(global path)"},
- PrettyPath: "(global name)",
+ KeyPath: logger.Path{Text: path},
+ PrettyPath: path,
Contents: text,
}
@@ -516,7 +516,7 @@ func validateLoaders(log logger.Log, loaders map[string]Loader) map[string]confi
func validateJSXExpr(log logger.Log, text string, name string) config.DefineExpr {
if text != "" {
- if expr, _ := js_parser.ParseDefineExprOrJSON(text); len(expr.Parts) > 0 || (name == "fragment" && expr.Constant != nil) {
+ if expr, _ := js_parser.ParseDefineExpr(text); len(expr.Parts) > 0 || (name == "fragment" && expr.Constant != nil) {
return expr
}
log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid JSX %s: %q", name, text))
@@ -524,6 +524,18 @@ func validateJSXExpr(log logger.Log, text string, name string) config.DefineExpr
return config.DefineExpr{}
}
+// This returns an arbitrary but unique key for each unique array of strings
+func mapKeyForDefine(parts []string) string {
+ var sb strings.Builder
+ var n [4]byte
+ for _, part := range parts {
+ binary.LittleEndian.PutUint32(n[:], uint32(len(part)))
+ sb.Write(n[:])
+ sb.WriteString(part)
+ }
+ return sb.String()
+}
+
func validateDefines(
log logger.Log,
defines map[string]string,
@@ -533,32 +545,36 @@ func validateDefines(
minify bool,
drop Drop,
) (*config.ProcessedDefines, []config.InjectedDefine) {
+ // Sort injected defines for determinism, since the imports will be injected
+ // into every file in the order that we return them from this function
+ sortedKeys := make([]string, 0, len(defines))
+ for key := range defines {
+ sortedKeys = append(sortedKeys, key)
+ }
+ sort.Strings(sortedKeys)
+
rawDefines := make(map[string]config.DefineData)
- var valueToInject map[string]config.InjectedDefine
- var definesToInject []string
-
- for key, value := range defines {
- // The key must be a dot-separated identifier list
- for _, part := range strings.Split(key, ".") {
- if !js_ast.IsIdentifier(part) {
- if part == key {
- log.AddError(nil, logger.Range{}, fmt.Sprintf("The define key %q must be a valid identifier", key))
- } else {
- log.AddError(nil, logger.Range{}, fmt.Sprintf("The define key %q contains invalid identifier %q", key, part))
- }
- continue
- }
+ nodeEnvParts := []string{"process", "env", "NODE_ENV"}
+ nodeEnvMapKey := mapKeyForDefine(nodeEnvParts)
+ var injectedDefines []config.InjectedDefine
+
+ for _, key := range sortedKeys {
+ value := defines[key]
+ keyParts := validateGlobalName(log, key, "(define name)")
+ if keyParts == nil {
+ continue
}
+ mapKey := mapKeyForDefine(keyParts)
// Parse the value
- defineExpr, injectExpr := js_parser.ParseDefineExprOrJSON(value)
+ defineExpr, injectExpr := js_parser.ParseDefineExpr(value)
// Define simple expressions
if defineExpr.Constant != nil || len(defineExpr.Parts) > 0 {
- rawDefines[key] = config.DefineData{DefineExpr: &defineExpr}
+ rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &defineExpr}
// Try to be helpful for common mistakes
- if len(defineExpr.Parts) == 1 && key == "process.env.NODE_ENV" {
+ if len(defineExpr.Parts) == 1 && mapKey == nodeEnvMapKey {
data := logger.MsgData{
Text: fmt.Sprintf("%q is defined as an identifier instead of a string (surround %q with quotes to get a string)", key, value),
}
@@ -606,32 +622,18 @@ func validateDefines(
// Inject complex expressions
if injectExpr != nil {
- definesToInject = append(definesToInject, key)
- if valueToInject == nil {
- valueToInject = make(map[string]config.InjectedDefine)
- }
- valueToInject[key] = config.InjectedDefine{
+ index := ast.MakeIndex32(uint32(len(injectedDefines)))
+ injectedDefines = append(injectedDefines, config.InjectedDefine{
Source: logger.Source{Contents: value},
Data: injectExpr,
Name: key,
- }
+ })
+ rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &config.DefineExpr{InjectedDefineIndex: index}}
continue
}
// Anything else is unsupported
- log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid define value (must be an entity name or valid JSON syntax): %s", value))
- }
-
- // Sort injected defines for determinism, since the imports will be injected
- // into every file in the order that we return them from this function
- var injectedDefines []config.InjectedDefine
- if len(definesToInject) > 0 {
- injectedDefines = make([]config.InjectedDefine, len(definesToInject))
- sort.Strings(definesToInject)
- for i, key := range definesToInject {
- injectedDefines[i] = valueToInject[key]
- rawDefines[key] = config.DefineData{DefineExpr: &config.DefineExpr{InjectedDefineIndex: ast.MakeIndex32(uint32(i))}}
- }
+ log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid define value (must be an entity name or JS literal): %s", value))
}
// If we're bundling for the browser, add a special-cased define for
@@ -641,16 +643,16 @@ func validateDefines(
// is only done if it's not already defined so that you can override it if
// necessary.
if isBuildAPI && platform == config.PlatformBrowser {
- if _, process := rawDefines["process"]; !process {
- if _, processEnv := rawDefines["process.env"]; !processEnv {
- if _, processEnvNodeEnv := rawDefines["process.env.NODE_ENV"]; !processEnvNodeEnv {
+ if _, process := rawDefines[mapKeyForDefine([]string{"process"})]; !process {
+ if _, processEnv := rawDefines[mapKeyForDefine([]string{"process.env"})]; !processEnv {
+ if _, processEnvNodeEnv := rawDefines[nodeEnvMapKey]; !processEnvNodeEnv {
var value []uint16
if minify {
value = helpers.StringToUTF16("production")
} else {
value = helpers.StringToUTF16("development")
}
- rawDefines["process.env.NODE_ENV"] = config.DefineData{DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: value}}}
+ rawDefines[nodeEnvMapKey] = config.DefineData{KeyParts: nodeEnvParts, DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: value}}}
}
}
}
@@ -658,29 +660,35 @@ func validateDefines(
// If we're dropping all console API calls, replace each one with undefined
if (drop & DropConsole) != 0 {
- define := rawDefines["console"]
+ consoleParts := []string{"console"}
+ consoleMapKey := mapKeyForDefine(consoleParts)
+ define := rawDefines[consoleMapKey]
+ define.KeyParts = consoleParts
define.Flags |= config.MethodCallsMustBeReplacedWithUndefined
- rawDefines["console"] = define
+ rawDefines[consoleMapKey] = define
}
for _, key := range pureFns {
- // The key must be a dot-separated identifier list
- for _, part := range strings.Split(key, ".") {
- if !js_ast.IsIdentifier(part) {
- log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid pure function: %q", key))
- continue
- }
+ keyParts := validateGlobalName(log, key, "(pure name)")
+ if keyParts == nil {
+ continue
}
+ mapKey := mapKeyForDefine(keyParts)
// Merge with any previously-specified defines
- define := rawDefines[key]
+ define := rawDefines[mapKey]
+ define.KeyParts = keyParts
define.Flags |= config.CallCanBeUnwrappedIfUnused
- rawDefines[key] = define
+ rawDefines[mapKey] = define
}
// Processing defines is expensive. Process them once here so the same object
// can be shared between all parsers we create using these arguments.
- processed := config.ProcessDefines(rawDefines)
+ definesArray := make([]config.DefineData, 0, len(rawDefines))
+ for _, define := range rawDefines {
+ definesArray = append(definesArray, define)
+ }
+ processed := config.ProcessDefines(definesArray)
return &processed, injectedDefines
}
@@ -1263,7 +1271,7 @@ func validateBuildOptions(
ASCIIOnly: validateASCIIOnly(buildOpts.Charset),
IgnoreDCEAnnotations: buildOpts.IgnoreAnnotations,
TreeShaking: validateTreeShaking(buildOpts.TreeShaking, buildOpts.Bundle, buildOpts.Format),
- GlobalName: validateGlobalName(log, buildOpts.GlobalName),
+ GlobalName: validateGlobalName(log, buildOpts.GlobalName, "(global name)"),
CodeSplitting: buildOpts.Splitting,
OutputFormat: validateFormat(buildOpts.Format),
AbsOutputFile: validatePath(log, realFS, buildOpts.Outfile, "outfile path"),
@@ -1476,10 +1484,12 @@ func rebuildImpl(args rebuildArgs, oldHashes map[string]string) (rebuildState, m
newHashes := oldHashes
// Stop now if there were errors
+ var results []graph.OutputFile
+ var metafile string
if !log.HasErrors() {
// Compile the bundle
result.MangleCache = cloneMangleCache(log, args.mangleCache)
- results, metafile := bundle.Compile(log, timer, result.MangleCache, linker.Link)
+ results, metafile = bundle.Compile(log, timer, result.MangleCache, linker.Link)
// Canceling a build generates a single error at the end of the build
if args.options.CancelFlag.DidCancel() {
@@ -1489,92 +1499,94 @@ func rebuildImpl(args rebuildArgs, oldHashes map[string]string) (rebuildState, m
// Stop now if there were errors
if !log.HasErrors() {
result.Metafile = metafile
+ }
+ }
- // Populate the results to return
- var hashBytes [8]byte
- result.OutputFiles = make([]OutputFile, len(results))
- newHashes = make(map[string]string)
- for i, item := range results {
- if args.options.WriteToStdout {
- item.AbsPath = ""
- }
- hasher := xxhash.New()
- hasher.Write(item.Contents)
- binary.LittleEndian.PutUint64(hashBytes[:], hasher.Sum64())
- hash := base64.RawStdEncoding.EncodeToString(hashBytes[:])
- result.OutputFiles[i] = OutputFile{
- Path: item.AbsPath,
- Contents: item.Contents,
- Hash: hash,
+ // Populate the results to return
+ var hashBytes [8]byte
+ result.OutputFiles = make([]OutputFile, len(results))
+ newHashes = make(map[string]string)
+ for i, item := range results {
+ if args.options.WriteToStdout {
+ item.AbsPath = ""
+ }
+ hasher := xxhash.New()
+ hasher.Write(item.Contents)
+ binary.LittleEndian.PutUint64(hashBytes[:], hasher.Sum64())
+ hash := base64.RawStdEncoding.EncodeToString(hashBytes[:])
+ result.OutputFiles[i] = OutputFile{
+ Path: item.AbsPath,
+ Contents: item.Contents,
+ Hash: hash,
+ }
+ newHashes[item.AbsPath] = hash
+ }
+
+ // Write output files before "OnEnd" callbacks run so they can expect
+ // output files to exist on the file system. "OnEnd" callbacks can be
+ // used to move output files to a different location after the build.
+ if args.write {
+ timer.Begin("Write output files")
+ if args.options.WriteToStdout {
+ // Special-case writing to stdout
+ if log.HasErrors() {
+ // No output is printed if there were any build errors
+ } else if len(results) != 1 {
+ log.AddError(nil, logger.Range{}, fmt.Sprintf(
+ "Internal error: did not expect to generate %d files when writing to stdout", len(results)))
+ } else {
+ // Print this later on, at the end of the current function
+ toWriteToStdout = results[0].Contents
+ }
+ } else {
+ // Delete old files that are no longer relevant
+ var toDelete []string
+ for absPath := range oldHashes {
+ if _, ok := newHashes[absPath]; !ok {
+ toDelete = append(toDelete, absPath)
}
- newHashes[item.AbsPath] = hash
}
- // Write output files before "OnEnd" callbacks run so they can expect
- // output files to exist on the file system. "OnEnd" callbacks can be
- // used to move output files to a different location after the build.
- if args.write {
- timer.Begin("Write output files")
- if args.options.WriteToStdout {
- // Special-case writing to stdout
- if len(results) != 1 {
+ // Process all file operations in parallel
+ waitGroup := sync.WaitGroup{}
+ waitGroup.Add(len(results) + len(toDelete))
+ for _, result := range results {
+ go func(result graph.OutputFile) {
+ defer waitGroup.Done()
+ fs.BeforeFileOpen()
+ defer fs.AfterFileClose()
+ if oldHash, ok := oldHashes[result.AbsPath]; ok && oldHash == newHashes[result.AbsPath] {
+ if contents, err := ioutil.ReadFile(result.AbsPath); err == nil && bytes.Equal(contents, result.Contents) {
+ // Skip writing out files that haven't changed since last time
+ return
+ }
+ }
+ if err := fs.MkdirAll(realFS, realFS.Dir(result.AbsPath), 0755); err != nil {
log.AddError(nil, logger.Range{}, fmt.Sprintf(
- "Internal error: did not expect to generate %d files when writing to stdout", len(results)))
+ "Failed to create output directory: %s", err.Error()))
} else {
- // Print this later on, at the end of the current function
- toWriteToStdout = results[0].Contents
- }
- } else {
- // Delete old files that are no longer relevant
- var toDelete []string
- for absPath := range oldHashes {
- if _, ok := newHashes[absPath]; !ok {
- toDelete = append(toDelete, absPath)
+ var mode os.FileMode = 0666
+ if result.IsExecutable {
+ mode = 0777
+ }
+ if err := ioutil.WriteFile(result.AbsPath, result.Contents, mode); err != nil {
+ log.AddError(nil, logger.Range{}, fmt.Sprintf(
+ "Failed to write to output file: %s", err.Error()))
}
}
-
- // Process all file operations in parallel
- waitGroup := sync.WaitGroup{}
- waitGroup.Add(len(results) + len(toDelete))
- for _, result := range results {
- go func(result graph.OutputFile) {
- defer waitGroup.Done()
- fs.BeforeFileOpen()
- defer fs.AfterFileClose()
- if oldHash, ok := oldHashes[result.AbsPath]; ok && oldHash == newHashes[result.AbsPath] {
- if contents, err := ioutil.ReadFile(result.AbsPath); err == nil && bytes.Equal(contents, result.Contents) {
- // Skip writing out files that haven't changed since last time
- return
- }
- }
- if err := fs.MkdirAll(realFS, realFS.Dir(result.AbsPath), 0755); err != nil {
- log.AddError(nil, logger.Range{}, fmt.Sprintf(
- "Failed to create output directory: %s", err.Error()))
- } else {
- var mode os.FileMode = 0666
- if result.IsExecutable {
- mode = 0777
- }
- if err := ioutil.WriteFile(result.AbsPath, result.Contents, mode); err != nil {
- log.AddError(nil, logger.Range{}, fmt.Sprintf(
- "Failed to write to output file: %s", err.Error()))
- }
- }
- }(result)
- }
- for _, absPath := range toDelete {
- go func(absPath string) {
- defer waitGroup.Done()
- fs.BeforeFileOpen()
- defer fs.AfterFileClose()
- os.Remove(absPath)
- }(absPath)
- }
- waitGroup.Wait()
- }
- timer.End("Write output files")
+ }(result)
+ }
+ for _, absPath := range toDelete {
+ go func(absPath string) {
+ defer waitGroup.Done()
+ fs.BeforeFileOpen()
+ defer fs.AfterFileClose()
+ os.Remove(absPath)
+ }(absPath)
}
+ waitGroup.Wait()
}
+ timer.End("Write output files")
}
// Only return the mangle cache for a successful build
@@ -1707,7 +1719,7 @@ func transformImpl(input string, transformOpts TransformOptions) TransformResult
SourceRoot: transformOpts.SourceRoot,
ExcludeSourcesContent: transformOpts.SourcesContent == SourcesContentExclude,
OutputFormat: validateFormat(transformOpts.Format),
- GlobalName: validateGlobalName(log, transformOpts.GlobalName),
+ GlobalName: validateGlobalName(log, transformOpts.GlobalName, "(global name)"),
MinifySyntax: transformOpts.MinifySyntax,
MinifyWhitespace: transformOpts.MinifyWhitespace,
MinifyIdentifiers: transformOpts.MinifyIdentifiers,
diff --git a/pkg/api/serve_other.go b/pkg/api/serve_other.go
index 0f5f3aa9b77..9f95da325a0 100644
--- a/pkg/api/serve_other.go
+++ b/pkg/api/serve_other.go
@@ -48,6 +48,7 @@ type apiHandler struct {
keyfileToLower string
certfileToLower string
fallback string
+ hosts []string
serveWaitGroup sync.WaitGroup
activeStreams []chan serverSentEvent
currentHashes map[string]string
@@ -103,12 +104,6 @@ func errorsToString(errors []Message) string {
func (h *apiHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
start := time.Now()
- // Special-case the esbuild event stream
- if req.Method == "GET" && req.URL.Path == "/esbuild" && req.Header.Get("Accept") == "text/event-stream" {
- h.serveEventStream(start, req, res)
- return
- }
-
// HEAD requests omit the body
maybeWriteResponseBody := func(bytes []byte) { res.Write(bytes) }
isHEAD := req.Method == "HEAD"
@@ -116,9 +111,37 @@ func (h *apiHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
maybeWriteResponseBody = func([]byte) { res.Write(nil) }
}
+ // Check the "Host" header to prevent DNS rebinding attacks
+ if strings.ContainsRune(req.Host, ':') {
+ // Try to strip off the port number
+ if host, _, err := net.SplitHostPort(req.Host); err == nil {
+ req.Host = host
+ }
+ }
+ if req.Host != "localhost" {
+ ok := false
+ for _, allowed := range h.hosts {
+ if req.Host == allowed {
+ ok = true
+ break
+ }
+ }
+ if !ok {
+ go h.notifyRequest(time.Since(start), req, http.StatusForbidden)
+ res.WriteHeader(http.StatusForbidden)
+ maybeWriteResponseBody([]byte(fmt.Sprintf("403 - Forbidden: The host %q is not allowed", req.Host)))
+ return
+ }
+ }
+
+ // Special-case the esbuild event stream
+ if req.Method == "GET" && req.URL.Path == "/esbuild" && req.Header.Get("Accept") == "text/event-stream" {
+ h.serveEventStream(start, req, res)
+ return
+ }
+
// Handle GET and HEAD requests
if (isHEAD || req.Method == "GET") && strings.HasPrefix(req.URL.Path, "/") {
- res.Header().Set("Access-Control-Allow-Origin", "*")
queryPath := path.Clean(req.URL.Path)[1:]
result := h.rebuild()
@@ -360,7 +383,6 @@ func (h *apiHandler) serveEventStream(start time.Time, req *http.Request, res ht
res.Header().Set("Content-Type", "text/event-stream")
res.Header().Set("Connection", "keep-alive")
res.Header().Set("Cache-Control", "no-cache")
- res.Header().Set("Access-Control-Allow-Origin", "*")
go h.notifyRequest(time.Since(start), req, http.StatusOK)
res.WriteHeader(http.StatusOK)
res.Write([]byte("retry: 500\n"))
@@ -773,7 +795,11 @@ func (ctx *internalContext) Serve(serveOptions ServeOptions) (ServeResult, error
}
if listener == nil {
// Otherwise pick the provided port
- if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", serveOptions.Port))); err != nil {
+ port := serveOptions.Port
+ if port < 0 || port > 0xFFFF {
+ port = 0 // Pick a random port if the provided port is out of range
+ }
+ if result, err := net.Listen(network, net.JoinHostPort(host, fmt.Sprintf("%d", port))); err != nil {
return ServeResult{}, err
} else {
listener = result
@@ -785,11 +811,26 @@ func (ctx *internalContext) Serve(serveOptions ServeOptions) (ServeResult, error
// Extract the real port in case we passed a port of "0"
var result ServeResult
+ var boundHost string
if host, text, err := net.SplitHostPort(addr); err == nil {
if port, err := strconv.ParseInt(text, 10, 32); err == nil {
result.Port = uint16(port)
- result.Host = host
+ boundHost = host
+ }
+ }
+
+ // Build up a list of all hosts we use
+ if ip := net.ParseIP(boundHost); ip != nil && ip.IsUnspecified() {
+ // If this is "0.0.0.0" or "::", list all relevant IP addresses
+ if addrs, err := net.InterfaceAddrs(); err == nil {
+ for _, addr := range addrs {
+ if addr, ok := addr.(*net.IPNet); ok && (addr.IP.To4() != nil) == (ip.To4() != nil) && !addr.IP.IsLinkLocalUnicast() {
+ result.Hosts = append(result.Hosts, addr.IP.String())
+ }
+ }
}
+ } else {
+ result.Hosts = append(result.Hosts, boundHost)
}
// HTTPS-related files should be absolute paths
@@ -811,6 +852,7 @@ func (ctx *internalContext) Serve(serveOptions ServeOptions) (ServeResult, error
keyfileToLower: strings.ToLower(serveOptions.Keyfile),
certfileToLower: strings.ToLower(serveOptions.Certfile),
fallback: serveOptions.Fallback,
+ hosts: append([]string{}, result.Hosts...),
rebuild: func() BuildResult {
if atomic.LoadInt32(&shouldStop) != 0 {
// Don't start more rebuilds if we were told to stop
@@ -901,7 +943,7 @@ func (ctx *internalContext) Serve(serveOptions ServeOptions) (ServeResult, error
// Print the URL(s) that the server can be reached at
if ctx.args.logOptions.LogLevel <= logger.LevelInfo {
- printURLs(result.Host, result.Port, isHTTPS, ctx.args.logOptions.Color)
+ printURLs(handler.hosts, result.Port, isHTTPS, ctx.args.logOptions.Color)
}
// Start the first build shortly after this function returns (but not
@@ -937,28 +979,11 @@ func (hack *hackListener) Accept() (net.Conn, error) {
return hack.Listener.Accept()
}
-func printURLs(host string, port uint16, https bool, useColor logger.UseColor) {
+func printURLs(hosts []string, port uint16, https bool, useColor logger.UseColor) {
logger.PrintTextWithColor(os.Stderr, useColor, func(colors logger.Colors) string {
- var hosts []string
sb := strings.Builder{}
sb.WriteString(colors.Reset)
- // If this is "0.0.0.0" or "::", list all relevant IP addresses
- if ip := net.ParseIP(host); ip != nil && ip.IsUnspecified() {
- if addrs, err := net.InterfaceAddrs(); err == nil {
- for _, addr := range addrs {
- if addr, ok := addr.(*net.IPNet); ok && (addr.IP.To4() != nil) == (ip.To4() != nil) && !addr.IP.IsLinkLocalUnicast() {
- hosts = append(hosts, addr.IP.String())
- }
- }
- }
- }
-
- // Otherwise, just list the one IP address
- if len(hosts) == 0 {
- hosts = append(hosts, host)
- }
-
// Determine the host kinds
kinds := make([]string, len(hosts))
maxLen := 0
diff --git a/pkg/cli/cli_impl.go b/pkg/cli/cli_impl.go
index 9a617069126..452cf75f1a1 100644
--- a/pkg/cli/cli_impl.go
+++ b/pkg/cli/cli_impl.go
@@ -1370,7 +1370,7 @@ func runImpl(osArgs []string, plugins []api.Plugin) int {
func parseServeOptionsImpl(osArgs []string) (api.ServeOptions, []string, error) {
host := ""
- portText := "0"
+ portText := ""
servedir := ""
keyfile := ""
certfile := ""
@@ -1397,8 +1397,8 @@ func parseServeOptionsImpl(osArgs []string) (api.ServeOptions, []string, error)
}
// Specifying the host is optional
+ var err error
if strings.ContainsRune(portText, ':') {
- var err error
host, portText, err = net.SplitHostPort(portText)
if err != nil {
return api.ServeOptions{}, nil, err
@@ -1406,16 +1406,24 @@ func parseServeOptionsImpl(osArgs []string) (api.ServeOptions, []string, error)
}
// Parse the port
- port, err := strconv.ParseInt(portText, 10, 32)
- if err != nil {
- return api.ServeOptions{}, nil, err
- }
- if port < 0 || port > 0xFFFF {
- return api.ServeOptions{}, nil, fmt.Errorf("Invalid port number: %s", portText)
+ var port int64
+ if portText != "" {
+ port, err = strconv.ParseInt(portText, 10, 32)
+ if err != nil {
+ return api.ServeOptions{}, nil, err
+ }
+ if port < 0 || port > 0xFFFF {
+ return api.ServeOptions{}, nil, fmt.Errorf("Invalid port number: %s", portText)
+ }
+ if port == 0 {
+ // 0 is the default value in Go, which we interpret as "try to
+ // pick port 8000", so this code uses -1 as the sentinel instead.
+ port = -1
+ }
}
return api.ServeOptions{
- Port: uint16(port),
+ Port: int(port),
Host: host,
Servedir: servedir,
Keyfile: keyfile,
diff --git a/scripts/browser/browser-tests.js b/scripts/browser/browser-tests.js
index ad05e84bd06..b67c0ab8d38 100644
--- a/scripts/browser/browser-tests.js
+++ b/scripts/browser/browser-tests.js
@@ -79,7 +79,19 @@ console.log(`[http] listening on ${serverURL}`)
async function main() {
let allTestsPassed = true
try {
- const browser = await require('puppeteer').launch()
+ const browser = await require('puppeteer').launch({
+ // This is here because since December 2024, GitHub changed something about
+ // their CI that causes this error:
+ //
+ // [FATAL:zygote_host_impl_linux.cc(128)] No usable sandbox! If you are running
+ // on Ubuntu 23.10+ or another Linux distro that has disabled unprivileged user
+ // namespaces with AppArmor, see https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md.
+ // Otherwise see https://chromium.googlesource.com/chromium/src/+/main/docs/linux/suid_sandbox_development.md
+ // for more information on developing with the (older) SUID sandbox. If you want
+ // to live dangerously and need an immediate workaround, you can try using
+ // --no-sandbox.
+ args: ['--no-sandbox'],
+ })
const page = await browser.newPage()
page.on('console', obj => {
@@ -87,14 +99,14 @@ async function main() {
})
page.exposeFunction('testBegin', args => {
- const { esm, min, worker, mime, approach } = JSON.parse(args)
- console.log(`💬 config: esm=${esm}, min=${min}, worker=${worker}, mime=${mime}, approach=${approach}`)
+ const config = Object.entries(JSON.parse(args)).map(([k, v]) => `${k}=${v}`).join(', ')
+ console.log(`💬 config: ${config}`)
})
page.exposeFunction('testEnd', args => {
if (args === null) console.log(`👍 success`)
else {
- const { test, stack, error } = JSON.parse(args)
+ const { test, error } = JSON.parse(args)
console.log(`❌ error${test ? ` [${test}]` : ``}: ${error}`)
allTestsPassed = false
}
diff --git a/scripts/browser/index.html b/scripts/browser/index.html
index e0a8f1ec5ff..a73a6c5d672 100644
--- a/scripts/browser/index.html
+++ b/scripts/browser/index.html
@@ -1,5 +1,158 @@
+
+
+
+
+
+
+
+
+
`);
// The server should support implicit "index.html" extensions on entry point files
- const implicitHTML = await fetch(server.host, server.port, '/out/')
+ const implicitHTML = await fetch(server.hosts[0], server.port, '/out/')
assert.strictEqual(implicitHTML.toString(), ``);
// Make a change to the HTML
@@ -4807,7 +4861,7 @@ let serveTests = {
const server = await context.serve({
host: '127.0.0.1',
})
- const stream = await makeEventStream(server.host, server.port, '/esbuild')
+ const stream = await makeEventStream(server.hosts[0], server.port, '/esbuild')
await context.rebuild().then(
() => Promise.reject(new Error('Expected an error to be thrown')),
() => { /* Ignore the build error due to the missing JS file */ },
@@ -4879,7 +4933,7 @@ let serveTests = {
host: '127.0.0.1',
servedir: testDir,
})
- const stream = await makeEventStream(server.host, server.port, '/esbuild')
+ const stream = await makeEventStream(server.hosts[0], server.port, '/esbuild')
await context.rebuild().then(
() => Promise.reject(new Error('Expected an error to be thrown')),
() => { /* Ignore the build error due to the missing JS file */ },
@@ -4951,7 +5005,7 @@ let serveTests = {
const server = await context.serve({
host: '127.0.0.1',
})
- const stream = await makeEventStream(server.host, server.port, '/esbuild')
+ const stream = await makeEventStream(server.hosts[0], server.port, '/esbuild')
await context.rebuild().then(
() => Promise.reject(new Error('Expected an error to be thrown')),
() => { /* Ignore the build error due to the missing JS file */ },
@@ -5024,7 +5078,7 @@ let serveTests = {
host: '127.0.0.1',
servedir: testDir,
})
- const stream = await makeEventStream(server.host, server.port, '/esbuild')
+ const stream = await makeEventStream(server.hosts[0], server.port, '/esbuild')
await context.rebuild().then(
() => Promise.reject(new Error('Expected an error to be thrown')),
() => { /* Ignore the build error due to the missing JS file */ },
@@ -5099,29 +5153,158 @@ let serveTests = {
servedir: wwwDir,
fallback,
})
- assert.strictEqual(result.host, '127.0.0.1');
+ assert.deepStrictEqual(result.hosts, ['127.0.0.1']);
assert.strictEqual(typeof result.port, 'number');
let buffer;
- buffer = await fetch(result.host, result.port, '/in.js')
+ buffer = await fetch(result.hosts[0], result.port, '/in.js')
assert.strictEqual(buffer.toString(), `console.log(123);\n`);
- buffer = await fetch(result.host, result.port, '/')
+ buffer = await fetch(result.hosts[0], result.port, '/')
assert.strictEqual(buffer.toString(), `fallback
`);
- buffer = await fetch(result.host, result.port, '/app/')
+ buffer = await fetch(result.hosts[0], result.port, '/app/')
assert.strictEqual(buffer.toString(), `index
`);
- buffer = await fetch(result.host, result.port, '/app/?foo')
+ buffer = await fetch(result.hosts[0], result.port, '/app/?foo')
assert.strictEqual(buffer.toString(), `index
`);
- buffer = await fetch(result.host, result.port, '/app/foo')
+ buffer = await fetch(result.hosts[0], result.port, '/app/foo')
assert.strictEqual(buffer.toString(), `fallback
`);
} finally {
await context.dispose();
}
},
+
+ async serveHostCheckIPv4({ esbuild, testDir }) {
+ const input = path.join(testDir, 'in.js')
+ await writeFileAsync(input, `console.log(123)`)
+
+ let onRequest;
+
+ const context = await esbuild.context({
+ entryPoints: [input],
+ format: 'esm',
+ outdir: testDir,
+ write: false,
+ });
+ try {
+ const result = await context.serve({
+ port: 0,
+ onRequest: args => onRequest(args),
+ })
+ assert(result.hosts.includes('127.0.0.1'));
+ assert.strictEqual(typeof result.port, 'number');
+
+ // GET /in.js from each host
+ for (const host of result.hosts) {
+ const singleRequestPromise = new Promise(resolve => { onRequest = resolve });
+ const buffer = await fetch(host, result.port, '/in.js')
+ assert.strictEqual(buffer.toString(), `console.log(123);\n`);
+ assert.strictEqual(fs.readFileSync(input, 'utf8'), `console.log(123)`)
+
+ let singleRequest = await singleRequestPromise;
+ assert.strictEqual(singleRequest.method, 'GET');
+ assert.strictEqual(singleRequest.path, '/in.js');
+ assert.strictEqual(singleRequest.status, 200);
+ assert.strictEqual(typeof singleRequest.remoteAddress, 'string');
+ assert.strictEqual(typeof singleRequest.timeInMS, 'number');
+ }
+
+ // GET /in.js with a forbidden host header
+ const forbiddenHosts = [
+ 'evil.com',
+ 'evil.com:666',
+ '1.2.3.4',
+ '1.2.3.4:666',
+ '::1234',
+ '[::1234]:666',
+ '[',
+ ]
+ for (const forbiddenHost of forbiddenHosts) {
+ const singleRequestPromise = new Promise(resolve => { onRequest = resolve });
+ try {
+ await fetch(result.hosts[0], result.port, '/in.js', { headers: { Host: forbiddenHost } })
+ } catch {
+ }
+
+ let singleRequest = await singleRequestPromise;
+ assert.strictEqual(singleRequest.method, 'GET');
+ assert.strictEqual(singleRequest.path, '/in.js');
+ assert.strictEqual(singleRequest.status, 403, forbiddenHost); // 403 means "Forbidden"
+ assert.strictEqual(typeof singleRequest.remoteAddress, 'string');
+ assert.strictEqual(typeof singleRequest.timeInMS, 'number');
+ }
+ } finally {
+ await context.dispose();
+ }
+ },
+
+ async serveHostCheckIPv6({ esbuild, testDir }) {
+ const input = path.join(testDir, 'in.js')
+ await writeFileAsync(input, `console.log(123)`)
+
+ let onRequest;
+
+ const context = await esbuild.context({
+ entryPoints: [input],
+ format: 'esm',
+ outdir: testDir,
+ write: false,
+ });
+ try {
+ const result = await context.serve({
+ host: '::',
+ port: 0,
+ onRequest: args => onRequest(args),
+ })
+ assert(result.hosts.includes('::1'));
+ assert.strictEqual(typeof result.port, 'number');
+
+ // GET /in.js from each host
+ for (const host of result.hosts) {
+ const singleRequestPromise = new Promise(resolve => { onRequest = resolve });
+ const buffer = await fetch(host, result.port, '/in.js')
+ assert.strictEqual(buffer.toString(), `console.log(123);\n`);
+ assert.strictEqual(fs.readFileSync(input, 'utf8'), `console.log(123)`)
+
+ let singleRequest = await singleRequestPromise;
+ assert.strictEqual(singleRequest.method, 'GET');
+ assert.strictEqual(singleRequest.path, '/in.js');
+ assert.strictEqual(singleRequest.status, 200);
+ assert.strictEqual(typeof singleRequest.remoteAddress, 'string');
+ assert.strictEqual(typeof singleRequest.timeInMS, 'number');
+ }
+
+ // GET /in.js with a forbidden host header
+ const forbiddenHosts = [
+ 'evil.com',
+ 'evil.com:666',
+ '1.2.3.4',
+ '1.2.3.4:666',
+ '::1234',
+ '[::1234]:666',
+ '[',
+ ]
+ for (const forbiddenHost of forbiddenHosts) {
+ const singleRequestPromise = new Promise(resolve => { onRequest = resolve });
+ try {
+ await fetch(result.hosts[0], result.port, '/in.js', { headers: { Host: forbiddenHost } })
+ } catch {
+ }
+
+ let singleRequest = await singleRequestPromise;
+ assert.strictEqual(singleRequest.method, 'GET');
+ assert.strictEqual(singleRequest.path, '/in.js');
+ assert.strictEqual(singleRequest.status, 403, forbiddenHost); // 403 means "Forbidden"
+ assert.strictEqual(typeof singleRequest.remoteAddress, 'string');
+ assert.strictEqual(typeof singleRequest.timeInMS, 'number');
+ }
+ } finally {
+ await context.dispose();
+ }
+ },
}
async function futureSyntax(esbuild, js, targetBelow, targetAbove) {
@@ -5953,6 +6136,22 @@ class Foo {
π["π 𐀀"]["𐀀"]["𐀀 π"] = `)
},
+ async iifeGlobalNameThis({ esbuild }) {
+ const { code } = await esbuild.transform(`export default 123`, { format: 'iife', globalName: 'this.foo.bar' })
+ const globals = {}
+ vm.createContext(globals)
+ vm.runInContext(code, globals)
+ assert.strictEqual(globals.foo.bar.default, 123)
+ assert.strictEqual(code.slice(0, code.indexOf('(() => {\n')), `(this.foo ||= {}).bar = `)
+ },
+
+ async iifeGlobalNameImportMeta({ esbuild }) {
+ const { code } = await esbuild.transform(`export default 123`, { format: 'iife', globalName: 'import.meta.foo.bar' })
+ const { default: import_meta } = await import('data:text/javascript,' + code + '\nexport default import.meta')
+ assert.strictEqual(import_meta.foo.bar.default, 123)
+ assert.strictEqual(code.slice(0, code.indexOf('(() => {\n')), `(import.meta.foo ||= {}).bar = `)
+ },
+
async jsx({ esbuild }) {
const { code } = await esbuild.transform(`console.log()`, { loader: 'jsx' })
assert.strictEqual(code, `console.log(/* @__PURE__ */ React.createElement("div", null));\n`)
@@ -6009,17 +6208,57 @@ class Foo {
},
async dropConsole({ esbuild }) {
- const { code } = await esbuild.transform(`
- console('foo')
+ const { code: drop } = await esbuild.transform(`
console.log('foo')
console.log(foo())
+ console.log.call(console, foo())
+ console.log.apply(console, foo())
x = console.log(bar())
+ console['log']('foo')
+ `, { drop: ['console'] })
+ assert.strictEqual(drop, `x = void 0;\n`)
+
+ const { code: keepArrow } = await esbuild.transform(`
+ console('foo')
console.abc.xyz('foo')
console['log']('foo')
console[abc][xyz]('foo')
console[foo()][bar()]('foo')
+ const x = {
+ log: console.log.bind(console),
+ }
`, { drop: ['console'] })
- assert.strictEqual(code, `console("foo");\nx = void 0;\n`)
+ assert.strictEqual(keepArrow, `console("foo");
+(() => {
+}).xyz("foo");
+console[abc][xyz]("foo");
+console[foo()][bar()]("foo");
+const x = {
+ log: (() => {
+ }).bind(console)
+};
+`)
+
+ const { code: keepFn } = await esbuild.transform(`
+ console('foo')
+ console.abc.xyz('foo')
+ console['log']('foo')
+ console[abc][xyz]('foo')
+ console[foo()][bar()]('foo')
+ const x = {
+ log: console.log.bind(console),
+ }
+ `, { drop: ['console'], supported: { arrow: false } })
+ assert.strictEqual(keepFn, `console("foo");
+(function() {
+}).xyz("foo");
+console[abc][xyz]("foo");
+console[foo()][bar()]("foo");
+const x = {
+ log: function() {
+ }.bind(console)
+};
+`)
},
async keepDebugger({ esbuild }) {
@@ -6032,7 +6271,7 @@ class Foo {
assert.strictEqual(code, `if (x) ;\n`)
},
- async define({ esbuild }) {
+ async defineProcessEnvNodeEnv({ esbuild }) {
const define = { 'process.env.NODE_ENV': '"something"' }
const { code: code1 } = await esbuild.transform(`console.log(process.env.NODE_ENV)`, { define })
@@ -6067,13 +6306,19 @@ class Foo {
},
async defineThis({ esbuild }) {
- const { code } = await esbuild.transform(`console.log(a, b); export {}`, { define: { a: 'this', b: 'this.foo' }, format: 'esm' })
- assert.strictEqual(code, `console.log(void 0, (void 0).foo);\n`)
+ const { code: code1 } = await esbuild.transform(`console.log(a, b); export {}`, { define: { a: 'this', b: 'this.foo' }, format: 'esm' })
+ assert.strictEqual(code1, `console.log(void 0, (void 0).foo);\n`)
+
+ const { code: code2 } = await esbuild.transform(`console.log(this, this.x); export {}`, { define: { this: 'a', 'this.x': 'b' }, format: 'esm' })
+ assert.strictEqual(code2, `console.log(a, b);\n`)
},
async defineImportMetaESM({ esbuild }) {
- const { code } = await esbuild.transform(`console.log(a, b); export {}`, { define: { a: 'import.meta', b: 'import.meta.foo' }, format: 'esm' })
- assert.strictEqual(code, `console.log(import.meta, import.meta.foo);\n`)
+ const { code: code1 } = await esbuild.transform(`console.log(a, b); export {}`, { define: { a: 'import.meta', b: 'import.meta.foo' }, format: 'esm' })
+ assert.strictEqual(code1, `console.log(import.meta, import.meta.foo);\n`)
+
+ const { code: code2 } = await esbuild.transform(`console.log(import.meta, import.meta.x); export {}`, { define: { 'import.meta': 'a', 'import.meta.x': 'b' }, format: 'esm' })
+ assert.strictEqual(code2, `console.log(a, b);\n`)
},
async defineImportMetaIIFE({ esbuild }) {
@@ -6109,6 +6354,50 @@ class Foo {
`)
},
+ async defineQuotedPropertyNameTransform({ esbuild }) {
+ const { code: code1 } = await esbuild.transform(`return x.y['z!']`, { define: { 'x.y["z!"]': 'true' } })
+ assert.strictEqual(code1, `return true;\n`)
+
+ const { code: code2 } = await esbuild.transform(`foo(x['y'].z, x.y['z'], x['y']['z'])`, { define: { 'x.y.z': 'true' } })
+ assert.strictEqual(code2, `foo(true, true, true);\n`)
+
+ const { code: code3 } = await esbuild.transform(`foo(x['y'].z, x.y['z'], x['y']['z'])`, { define: { 'x["y"].z': 'true' } })
+ assert.strictEqual(code3, `foo(true, true, true);\n`)
+
+ const { code: code4 } = await esbuild.transform(`foo(x['y'].z, x.y['z'], x['y']['z'])`, { define: { 'x.y["z"]': 'true' } })
+ assert.strictEqual(code4, `foo(true, true, true);\n`)
+
+ const { code: code5 } = await esbuild.transform(`foo(x['y'].z, x.y['z'], x['y']['z'])`, { define: { 'x["y"][\'z\']': 'true' } })
+ assert.strictEqual(code5, `foo(true, true, true);\n`)
+
+ const { code: code6 } = await esbuild.transform(`foo(import.meta['y'].z, import.meta.y['z'], import.meta['y']['z'])`, { define: { 'import.meta["y"].z': 'true' } })
+ assert.strictEqual(code6, `foo(true, true, true);\n`)
+
+ const { code: code7 } = await esbuild.transform(`foo(import.meta['y!'].z, import.meta.y['z!'], import.meta['y!']['z!'])`, {
+ define: {
+ 'import.meta["y!"].z': 'true',
+ 'import.meta.y["z!"]': 'true',
+ 'import.meta["y!"]["z!"]': 'true'
+ },
+ })
+ assert.strictEqual(code7, `foo(true, true, true);\n`)
+ },
+
+
+ async defineQuotedPropertyNameBuild({ esbuild }) {
+ const { outputFiles } = await esbuild.build({
+ stdin: { contents: `return process.env['SOME-TEST-VAR']` },
+ define: { 'process.env["SOME-TEST-VAR"]': 'true' },
+ write: false,
+ })
+ assert.strictEqual(outputFiles[0].text, `return true;\n`)
+ },
+
+ async defineBigInt({ esbuild }) {
+ const { code } = await esbuild.transform(`console.log(a, b); export {}`, { define: { a: '0n', b: '{"x":[123n]}' }, format: 'esm' })
+ assert.strictEqual(code, `var define_b_default = { x: [123n] };\nconsole.log(0n, define_b_default);\n`)
+ },
+
async json({ esbuild }) {
const { code } = await esbuild.transform(`{ "x": "y" }`, { loader: 'json' })
assert.strictEqual(code, `module.exports = { x: "y" };\n`)
@@ -6444,6 +6733,14 @@ class Foo {
assert.strictEqual(code2, `foo;\n`)
},
+ async pureImportMeta({ esbuild }) {
+ const { code: code1 } = await esbuild.transform(`import.meta.foo(123, foo)`, { minifySyntax: true, pure: [] })
+ assert.strictEqual(code1, `import.meta.foo(123, foo);\n`)
+
+ const { code: code2 } = await esbuild.transform(`import.meta.foo(123, foo)`, { minifySyntax: true, pure: ['import.meta.foo'] })
+ assert.strictEqual(code2, `foo;\n`)
+ },
+
async nameCollisionEvalRename({ esbuild }) {
const { code } = await esbuild.transform(`
// "arg" must not be renamed to "arg2"
@@ -6747,11 +7044,11 @@ class Foo {
async multipleEngineTargetsNotSupported({ esbuild }) {
try {
- await esbuild.transform(`0n`, { target: ['es5', 'chrome1', 'safari2', 'firefox3'] })
+ await esbuild.transform(`class X {}`, { target: ['es5', 'chrome1', 'safari2', 'firefox3'] })
throw new Error('Expected an error to be thrown')
} catch (e) {
assert.strictEqual(e.errors[0].text,
- 'Big integer literals are not available in the configured target environment ("chrome1", "es5", "firefox3", "safari2")')
+ 'Transforming class syntax to the configured target environment ("chrome1", "es5", "firefox3", "safari2") is not supported yet')
}
},
@@ -6775,12 +7072,12 @@ class Foo {
check({ supported: { arrow: false }, target: 'es2022' }, `x = () => y`, `x = function() {\n return y;\n};\n`),
// JS: error
- check({ supported: { bigint: true } }, `x = 1n`, `x = 1n;\n`),
- check({ supported: { bigint: false } }, `x = 1n`, `Big integer literals are not available in the configured target environment`),
- check({ supported: { bigint: true }, target: 'es5' }, `x = 1n`, `x = 1n;\n`),
- check({ supported: { bigint: false }, target: 'es5' }, `x = 1n`, `Big integer literals are not available in the configured target environment ("es5" + 1 override)`),
- check({ supported: { bigint: true }, target: 'es2022' }, `x = 1n`, `x = 1n;\n`),
- check({ supported: { bigint: false }, target: 'es2022' }, `x = 1n`, `Big integer literals are not available in the configured target environment ("es2022" + 1 override)`),
+ check({ supported: { class: true } }, `class X {}`, `class X {\n}\n`),
+ check({ supported: { class: false } }, `class X {}`, `Transforming class syntax to the configured target environment is not supported yet`),
+ check({ supported: { class: true }, target: 'es5' }, `class X {}`, `class X {\n}\n`),
+ check({ supported: { class: false }, target: 'es5' }, `class X {}`, `Transforming class syntax to the configured target environment ("es5" + 11 overrides) is not supported yet`),
+ check({ supported: { class: true }, target: 'es6' }, `class X {}`, `class X {\n}\n`),
+ check({ supported: { class: false }, target: 'es6' }, `class X {}`, `Transforming class syntax to the configured target environment ("es2015" + 11 overrides) is not supported yet`),
// CSS: lower
check({ supported: { 'hex-rgba': true }, loader: 'css' }, `a { color: #1234 }`, `a {\n color: #1234;\n}\n`),
@@ -6789,7 +7086,7 @@ class Foo {
check({ target: 'safari15.4', loader: 'css' }, `a { mask-image: url(x.png) }`, `a {\n mask-image: url(x.png);\n}\n`),
// Check for "+ 2 overrides"
- check({ supported: { bigint: false, arrow: true }, target: 'es2022' }, `x = 1n`, `Big integer literals are not available in the configured target environment ("es2022" + 2 overrides)`),
+ check({ supported: { class: false, arrow: true }, target: 'es2022' }, `class X {}`, `Transforming class syntax to the configured target environment ("es2022" + 12 overrides) is not supported yet`),
])
},
@@ -6833,9 +7130,6 @@ class Foo {
},
// Future syntax
- bigInt: ({ esbuild }) => futureSyntax(esbuild, '123n', 'es2019', 'es2020'),
- bigIntKey: ({ esbuild }) => futureSyntax(esbuild, '({123n: 0})', 'es2019', 'es2020'),
- bigIntPattern: ({ esbuild }) => futureSyntax(esbuild, 'let {123n: x} = y', 'es2019', 'es2020'),
nonIdArrayRest: ({ esbuild }) => futureSyntax(esbuild, 'let [...[x]] = y', 'es2015', 'es2016'),
topLevelAwait: ({ esbuild }) => futureSyntax(esbuild, 'await foo', 'es2020', 'esnext'),
topLevelForAwait: ({ esbuild }) => futureSyntax(esbuild, 'for await (foo of bar) ;', 'es2020', 'esnext'),
@@ -7356,7 +7650,7 @@ let childProcessTests = {
},
}
-let syncTests = {
+let serialTests = {
async startStop({ esbuild }) {
for (let i = 0; i < 3; i++) {
let result1 = await esbuild.transform('1+2')
@@ -7398,7 +7692,6 @@ async function main() {
process.exit(1)
}, minutes * 60 * 1000)
- // Run all tests concurrently
const runTest = async (name, fn) => {
let testDir = path.join(rootTestDir, name)
try {
@@ -7423,6 +7716,7 @@ async function main() {
...Object.entries(childProcessTests),
]
+ // Run everything in "tests" concurrently
let allTestsPassed = (await Promise.all(tests.map(([name, fn]) => {
const promise = runTest(name, fn)
@@ -7435,7 +7729,8 @@ async function main() {
return promise.finally(() => clearTimeout(timeout))
}))).every(success => success)
- for (let [name, fn] of Object.entries(syncTests)) {
+ // Run everything in "serialTests" in serial
+ for (let [name, fn] of Object.entries(serialTests)) {
if (!await runTest(name, fn)) {
allTestsPassed = false
}
diff --git a/scripts/node-unref-tests.js b/scripts/node-unref-tests.js
index 187b61d0a80..151619498da 100644
--- a/scripts/node-unref-tests.js
+++ b/scripts/node-unref-tests.js
@@ -20,7 +20,7 @@ async function tests() {
const context = await esbuild.context({})
try {
const server = await context.serve({})
- assert.strictEqual(server.host, '0.0.0.0')
+ assert(server.hosts.includes('127.0.0.1'))
assert.strictEqual(typeof server.port, 'number')
} finally {
await context.dispose()
diff --git a/scripts/plugin-tests.js b/scripts/plugin-tests.js
index 0a4400f2d28..c89845e9d5f 100644
--- a/scripts/plugin-tests.js
+++ b/scripts/plugin-tests.js
@@ -3220,7 +3220,7 @@ let syncTests = {
// Fetch once
try {
- await fetch(server.host, server.port, '/out.js')
+ await fetch(server.hosts[0], server.port, '/out.js')
throw new Error('Expected an error to be thrown')
} catch (err) {
assert.strictEqual(err.statusCode, 503)
@@ -3232,7 +3232,7 @@ let syncTests = {
await writeFileAsync(input, `console.log(1+2)`)
// Fetch again
- const buffer = await fetchUntilSuccessOrTimeout(server.host, server.port, '/out.js')
+ const buffer = await fetchUntilSuccessOrTimeout(server.hosts[0], server.port, '/out.js')
assert.strictEqual(buffer.toString(), 'console.log(1 + 2);\n')
assert.strictEqual(latestResult.errors.length, 0)
assert.strictEqual(latestResult.outputFiles, undefined)
@@ -3270,7 +3270,7 @@ let syncTests = {
// Fetch once
try {
- await fetch(server.host, server.port, '/out.js')
+ await fetch(server.hosts[0], server.port, '/out.js')
throw new Error('Expected an error to be thrown')
} catch (err) {
assert.strictEqual(err.statusCode, 503)
@@ -3283,7 +3283,7 @@ let syncTests = {
await writeFileAsync(input, `console.log(1+2)`)
// Fetch again
- const buffer = await fetchUntilSuccessOrTimeout(server.host, server.port, '/out.js')
+ const buffer = await fetchUntilSuccessOrTimeout(server.hosts[0], server.port, '/out.js')
assert.strictEqual(buffer.toString(), 'console.log(1 + 2);\n')
assert.strictEqual(latestResult.errors.length, 0)
assert.strictEqual(latestResult.outputFiles.length, 1)
diff --git a/scripts/verify-source-map.js b/scripts/verify-source-map.js
index dd3f9f8185e..70f2d1f7e91 100644
--- a/scripts/verify-source-map.js
+++ b/scripts/verify-source-map.js
@@ -3,6 +3,7 @@ const { buildBinary, removeRecursiveSync } = require('./esbuild')
const childProcess = require('child_process')
const path = require('path')
const util = require('util')
+const url = require('url')
const fs = require('fs').promises
const execFileAsync = util.promisify(childProcess.execFile)
@@ -192,6 +193,27 @@ const toSearchCodeSplitting = {
out: 'out.ts',
}
+const testCaseCodeSplittingEmptyFile = {
+ 'entry1.ts': `
+ import './a.ts'
+ import './empty.ts'
+ import './b.ts'
+ `,
+ 'entry2.ts': `
+ import './a.ts'
+ import './empty.ts'
+ import './b.ts'
+ `,
+ 'a.ts': `'foo'.print()`,
+ 'empty.ts': `//! @preserve`,
+ 'b.ts': `'bar'.print()`,
+}
+
+const toSearchCodeSplittingEmptyFile = {
+ foo: 'a.ts',
+ bar: 'b.ts',
+}
+
const testCaseUnicode = {
'entry.js': `
import './a'
@@ -256,8 +278,8 @@ const testCaseComplex = {
}
const toSearchComplex = {
- '[object Array]': '../../node_modules/fuse.js/dist/webpack:/src/helpers/is_array.js',
- 'Score average:': '../../node_modules/fuse.js/dist/webpack:/src/index.js',
+ '[object Array]': 'webpack:///src/helpers/is_array.js',
+ 'Score average:': 'webpack:///src/index.js',
'0123456789': '../../node_modules/object-assign/index.js',
'forceUpdate': '../../node_modules/react/cjs/react.production.min.js',
};
@@ -399,7 +421,7 @@ console.log({ foo })
}
const toSearchMissingSourcesContent = {
- bar: 'src/foo.ts',
+ bar: 'maps/src/foo.ts',
}
// The "null" should be filled in by the contents of "bar.ts"
@@ -430,7 +452,51 @@ const toSearchNullSourcesContent = {
bar: 'bar.ts',
}
-async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf, followUpFlags = [] }) {
+const testCaseFileNameWithSpaces = {
+ 'file name with spaces.js': `console . log ( "test" )`,
+}
+
+const toSearchFileNameWithSpaces = {
+ test: 'file name with spaces.js',
+}
+
+const testCaseAbsoluteSourceMappingURL = {
+ 'entry.js': `console.log("test");
+//# sourceMappingURL={ABSOLUTE_FILE_URL}/entry.js.map
+`,
+ 'entry.js.map': `{
+ "version": 3,
+ "sources": ["input.js"],
+ "sourcesContent": ["console . log ( \\\"test\\\" )"],
+ "mappings": "AAAA,QAAU,IAAM,MAAO;",
+ "names": []
+}
+`,
+}
+
+const toSearchAbsoluteSourceMappingURL = {
+ test: 'input.js',
+}
+
+const testCaseAbsoluteSourcesURL = {
+ 'entry.js': `console.log("test");
+//# sourceMappingURL=entry.js.map
+`,
+ 'entry.js.map': `{
+ "version": 3,
+ "sources": ["{ABSOLUTE_FILE_URL}/input.js"],
+ "sourcesContent": ["console . log ( \\\"test\\\" )"],
+ "mappings": "AAAA,QAAU,IAAM,MAAO;",
+ "names": []
+}
+`,
+}
+
+const toSearchAbsoluteSourcesURL = {
+ test: 'input.js',
+}
+
+async function check(kind, testCase, toSearch, { outfile, flags, entryPoints, crlf, followUpFlags = [], checkFirstChunk }) {
let failed = 0
try {
@@ -448,12 +514,17 @@ async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf,
if (name !== '') {
const tempPath = path.join(tempDir, name)
let code = testCase[name]
+
+ // Make it possible to test absolute "file://" URLs
+ code = code.replace('{ABSOLUTE_FILE_URL}', url.pathToFileURL(tempDir).href)
+
await fs.mkdir(path.dirname(tempPath), { recursive: true })
if (crlf) code = code.replace(/\n/g, '\r\n')
await fs.writeFile(tempPath, code)
}
}
+ if (outfile && !flags.some(flag => flag.startsWith('--outdir='))) flags.push('--outfile=' + outfile)
const args = ['--sourcemap', '--log-level=warning'].concat(flags)
const isStdin = '' in testCase
let stdout = ''
@@ -471,16 +542,27 @@ async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf,
let outCode
let outCodeMap
+ // Optionally check the first chunk when splitting
+ if (checkFirstChunk && flags.includes('--splitting')) {
+ const entries = await fs.readdir(tempDir)
+ for (const entry of entries.sort()) {
+ if (entry.startsWith('chunk-')) {
+ outfile = entry
+ break
+ }
+ }
+ }
+
if (isStdin) {
outCode = stdout
- recordCheck(outCode.includes(`# sourceMappingURL=data:application/json;base64,`), `.${ext} file must contain source map`)
+ recordCheck(outCode.includes(`# sourceMappingURL=data:application/json;base64,`), `stdin must contain source map`)
outCodeMap = Buffer.from(outCode.slice(outCode.indexOf('base64,') + 'base64,'.length).trim(), 'base64').toString()
}
else {
- outCode = await fs.readFile(path.join(tempDir, `out.${ext}`), 'utf8')
- recordCheck(outCode.includes(`# sourceMappingURL=out.${ext}.map`), `.${ext} file must link to .${ext}.map`)
- outCodeMap = await fs.readFile(path.join(tempDir, `out.${ext}.map`), 'utf8')
+ outCode = await fs.readFile(path.join(tempDir, outfile), 'utf8')
+ recordCheck(outCode.includes(`# sourceMappingURL=${encodeURIComponent(outfile)}.map`), `${outfile} file must link to ${outfile}.map`)
+ outCodeMap = await fs.readFile(path.join(tempDir, `${outfile}.map`), 'utf8')
}
// Check the mapping of various key locations back to the original source
@@ -494,8 +576,8 @@ async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf,
let outColumn = outLastLine.length
const { source, line, column } = map.originalPositionFor({ line: outLine, column: outColumn })
- const inSource = isStdin ? '' : toSearch[id];
- recordCheck(source === inSource, `expected source: ${inSource}, observed source: ${source}`)
+ const inSource = isStdin ? '' : toSearch[id]
+ recordCheck(decodeURI(source) === inSource, `expected source: ${inSource}, observed source: ${source}`)
const inCode = map.sourceContentFor(source)
if (inCode === null) throw new Error(`Got null for source content for "${source}"`)
@@ -558,25 +640,27 @@ async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf,
// Bundle again to test nested source map chaining
for (let order of [0, 1, 2]) {
- const fileToTest = isStdin ? `stdout.${ext}` : `out.${ext}`
- const nestedEntry = path.join(tempDir, `nested-entry.${ext}`)
- if (isStdin) await fs.writeFile(path.join(tempDir, fileToTest), outCode)
- await fs.writeFile(path.join(tempDir, `extra.${ext}`), `console.log('extra')`)
- const importKeyword = ext === 'css' ? '@import' : 'import'
+ const infile = isStdin ? `stdout.js` : outfile
+ const outfile2 = 'nested.' + infile
+ const nestedEntry = path.join(tempDir, `nested-entry.${infile}`)
+ if (isStdin) await fs.writeFile(path.join(tempDir, infile), outCode)
+ await fs.writeFile(path.join(tempDir, `extra.${infile}`), `console.log('extra')`)
+ const importKeyword = path.extname(infile) === '.css' ? '@import' : 'import'
await fs.writeFile(nestedEntry,
- order === 1 ? `${importKeyword} './${fileToTest}'; ${importKeyword} './extra.${ext}'` :
- order === 2 ? `${importKeyword} './extra.${ext}'; ${importKeyword} './${fileToTest}'` :
- `${importKeyword} './${fileToTest}'`)
+ order === 1 ? `${importKeyword} './${infile}'; ${importKeyword} './extra.${infile}'` :
+ order === 2 ? `${importKeyword} './extra.${infile}'; ${importKeyword} './${infile}'` :
+ `${importKeyword} './${infile}'`)
await execFileAsync(esbuildPath, [
nestedEntry,
'--bundle',
- '--outfile=' + path.join(tempDir, `out2.${ext}`),
+ '--outfile=' + path.join(tempDir, outfile2),
'--sourcemap',
+ '--format=esm',
].concat(followUpFlags), { cwd: testDir })
- const out2Code = await fs.readFile(path.join(tempDir, `out2.${ext}`), 'utf8')
- recordCheck(out2Code.includes(`# sourceMappingURL=out2.${ext}.map`), `.${ext} file must link to .${ext}.map`)
- const out2CodeMap = await fs.readFile(path.join(tempDir, `out2.${ext}.map`), 'utf8')
+ const out2Code = await fs.readFile(path.join(tempDir, outfile2), 'utf8')
+ recordCheck(out2Code.includes(`# sourceMappingURL=${encodeURIComponent(outfile2)}.map`), `${outfile2} file must link to ${outfile2}.map`)
+ const out2CodeMap = await fs.readFile(path.join(tempDir, `${outfile2}.map`), 'utf8')
const out2Map = await new SourceMapConsumer(out2CodeMap)
checkMap(out2Code, out2Map)
@@ -593,7 +677,7 @@ async function check(kind, testCase, toSearch, { ext, flags, entryPoints, crlf,
return failed
}
-async function checkNames(kind, testCase, { ext, flags, entryPoints, crlf }) {
+async function checkNames(kind, testCase, { outfile, flags, entryPoints, crlf }) {
let failed = 0
try {
@@ -615,6 +699,7 @@ async function checkNames(kind, testCase, { ext, flags, entryPoints, crlf }) {
await fs.writeFile(tempPath, code)
}
+ if (outfile) flags.push('--outfile=' + outfile)
const args = ['--sourcemap', '--log-level=warning'].concat(flags)
let stdout = ''
@@ -627,9 +712,9 @@ async function checkNames(kind, testCase, { ext, flags, entryPoints, crlf }) {
child.on('error', reject)
})
- const outCode = await fs.readFile(path.join(tempDir, `out.${ext}`), 'utf8')
- recordCheck(outCode.includes(`# sourceMappingURL=out.${ext}.map`), `.${ext} file must link to .${ext}.map`)
- const outCodeMap = await fs.readFile(path.join(tempDir, `out.${ext}.map`), 'utf8')
+ const outCode = await fs.readFile(path.join(tempDir, outfile), 'utf8')
+ recordCheck(outCode.includes(`# sourceMappingURL=${encodeURIComponent(outfile)}.map`), `${outfile} file must link to ${outfile}.map`)
+ const outCodeMap = await fs.readFile(path.join(tempDir, `${outfile}.map`), 'utf8')
// Check the mapping of various key locations back to the original source
const checkMap = (out, map) => {
@@ -700,23 +785,24 @@ async function checkNames(kind, testCase, { ext, flags, entryPoints, crlf }) {
// Bundle again to test nested source map chaining
for (let order of [0, 1, 2]) {
- const fileToTest = `out.${ext}`
- const nestedEntry = path.join(tempDir, `nested-entry.${ext}`)
- await fs.writeFile(path.join(tempDir, `extra.${ext}`), `console.log('extra')`)
+ const infile = outfile
+ const outfile2 = 'nested.' + infile
+ const nestedEntry = path.join(tempDir, `nested-entry.${infile}`)
+ await fs.writeFile(path.join(tempDir, `extra.${infile}`), `console.log('extra')`)
await fs.writeFile(nestedEntry,
- order === 1 ? `import './${fileToTest}'; import './extra.${ext}'` :
- order === 2 ? `import './extra.${ext}'; import './${fileToTest}'` :
- `import './${fileToTest}'`)
+ order === 1 ? `import './${infile}'; import './extra.${infile}'` :
+ order === 2 ? `import './extra.${infile}'; import './${infile}'` :
+ `import './${infile}'`)
await execFileAsync(esbuildPath, [
nestedEntry,
'--bundle',
- '--outfile=' + path.join(tempDir, `out2.${ext}`),
+ '--outfile=' + path.join(tempDir, outfile2),
'--sourcemap',
], { cwd: testDir })
- const out2Code = await fs.readFile(path.join(tempDir, `out2.${ext}`), 'utf8')
- recordCheck(out2Code.includes(`# sourceMappingURL=out2.${ext}.map`), `.${ext} file must link to .${ext}.map`)
- const out2CodeMap = await fs.readFile(path.join(tempDir, `out2.${ext}.map`), 'utf8')
+ const out2Code = await fs.readFile(path.join(tempDir, outfile2), 'utf8')
+ recordCheck(out2Code.includes(`# sourceMappingURL=${encodeURIComponent(outfile2)}.map`), `${outfile2} file must link to ${outfile2}.map`)
+ const out2CodeMap = await fs.readFile(path.join(tempDir, `${outfile2}.map`), 'utf8')
const out2Map = await new SourceMapConsumer(out2CodeMap)
checkMap(out2Code, out2Map)
@@ -741,157 +827,182 @@ async function main() {
const suffix = (crlf ? '-crlf' : '') + (minify ? '-min' : '')
promises.push(
check('commonjs' + suffix, testCaseCommonJS, toSearchBundle, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['a.js'],
crlf,
}),
check('es6' + suffix, testCaseES6, toSearchBundle, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['a.js'],
crlf,
}),
check('discontiguous' + suffix, testCaseDiscontiguous, toSearchBundle, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['a.js'],
crlf,
}),
check('ts' + suffix, testCaseTypeScriptRuntime, toSearchNoBundleTS, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js'),
+ outfile: 'out.js',
+ flags,
entryPoints: ['a.ts'],
crlf,
}),
check('stdin-stdout' + suffix, testCaseStdin, toSearchNoBundle, {
- ext: 'js',
flags: flags.concat('--sourcefile='),
entryPoints: [],
crlf,
}),
check('empty' + suffix, testCaseEmptyFile, toSearchEmptyFile, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.js'],
crlf,
}),
check('non-js' + suffix, testCaseNonJavaScriptFile, toSearchNonJavaScriptFile, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.js'],
crlf,
}),
check('splitting' + suffix, testCaseCodeSplitting, toSearchCodeSplitting, {
- ext: 'js',
+ outfile: 'out.js',
flags: flags.concat('--outdir=.', '--bundle', '--splitting', '--format=esm'),
entryPoints: ['out.ts', 'other.ts'],
crlf,
}),
check('unicode' + suffix, testCaseUnicode, toSearchUnicode, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--charset=utf8'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--charset=utf8'),
entryPoints: ['entry.js'],
crlf,
}),
check('unicode-globalName' + suffix, testCaseUnicode, toSearchUnicode, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--global-name=πππ', '--charset=utf8'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--global-name=πππ', '--charset=utf8'),
entryPoints: ['entry.js'],
crlf,
}),
check('dummy' + suffix, testCasePartialMappings, toSearchPartialMappings, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.js'],
crlf,
}),
check('dummy' + suffix, testCasePartialMappingsPercentEscape, toSearchPartialMappings, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.js'],
crlf,
}),
check('banner-footer' + suffix, testCaseES6, toSearchBundle, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--banner:js="/* LICENSE abc */"', '--footer:js="/* end of file banner */"'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--banner:js="/* LICENSE abc */"', '--footer:js="/* end of file banner */"'),
entryPoints: ['a.js'],
crlf,
}),
check('complex' + suffix, testCaseComplex, toSearchComplex, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--define:process.env.NODE_ENV="production"'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--define:process.env.NODE_ENV="production"'),
entryPoints: ['entry.js'],
crlf,
}),
check('dynamic-import' + suffix, testCaseDynamicImport, toSearchDynamicImport, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--external:./ext/*', '--format=esm'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--external:./ext/*', '--format=esm'),
entryPoints: ['entry.js'],
crlf,
followUpFlags: ['--external:./ext/*', '--format=esm'],
}),
check('dynamic-require' + suffix, testCaseDynamicImport, toSearchDynamicImport, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--external:./ext/*', '--format=cjs'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--external:./ext/*', '--format=cjs'),
entryPoints: ['entry.js'],
crlf,
followUpFlags: ['--external:./ext/*', '--format=cjs'],
}),
check('bundle-css' + suffix, testCaseBundleCSS, toSearchBundleCSS, {
- ext: 'css',
- flags: flags.concat('--outfile=out.css', '--bundle'),
+ outfile: 'out.css',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.css'],
crlf,
}),
check('jsx-runtime' + suffix, testCaseJSXRuntime, toSearchJSXRuntime, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--jsx=automatic', '--external:react/jsx-runtime'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--jsx=automatic', '--external:react/jsx-runtime'),
entryPoints: ['entry.jsx'],
crlf,
}),
check('jsx-dev-runtime' + suffix, testCaseJSXRuntime, toSearchJSXRuntime, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--jsx=automatic', '--jsx-dev', '--external:react/jsx-dev-runtime'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--jsx=automatic', '--jsx-dev', '--external:react/jsx-dev-runtime'),
entryPoints: ['entry.jsx'],
crlf,
}),
+ check('file-name-with-spaces' + suffix, testCaseFileNameWithSpaces, toSearchFileNameWithSpaces, {
+ outfile: 'output name with spaces.js',
+ flags: flags.concat('--bundle'),
+ entryPoints: ['file name with spaces.js'],
+ crlf,
+ }),
+ check('absolute-source-mapping-url' + suffix, testCaseAbsoluteSourceMappingURL, toSearchAbsoluteSourceMappingURL, {
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
+ entryPoints: ['entry.js'],
+ crlf,
+ }),
+ check('absolute-sources-url' + suffix, testCaseAbsoluteSourcesURL, toSearchAbsoluteSourcesURL, {
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
+ entryPoints: ['entry.js'],
+ crlf,
+ }),
// Checks for the "names" field
checkNames('names' + suffix, testCaseNames, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['entry.js'],
crlf,
}),
checkNames('names-mangle' + suffix, testCaseNames, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--mangle-props=^mangle_$'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--mangle-props=^mangle_$'),
entryPoints: ['entry.js'],
crlf,
}),
checkNames('names-mangle-quoted' + suffix, testCaseNames, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle', '--mangle-props=^mangle_$', '--mangle-quoted'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle', '--mangle-props=^mangle_$', '--mangle-quoted'),
entryPoints: ['entry.js'],
crlf,
}),
// Checks for loading missing "sourcesContent" in nested source maps
check('missing-sources-content' + suffix, testCaseMissingSourcesContent, toSearchMissingSourcesContent, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['foo.js'],
crlf,
}),
// Checks for null entries in "sourcesContent" in nested source maps
check('null-sources-content' + suffix, testCaseNullSourcesContent, toSearchNullSourcesContent, {
- ext: 'js',
- flags: flags.concat('--outfile=out.js', '--bundle'),
+ outfile: 'out.js',
+ flags: flags.concat('--bundle'),
entryPoints: ['foo.js'],
crlf,
}),
+
+ // This checks for issues with files in a bundle that don't emit source maps
+ check('splitting-empty' + suffix, testCaseCodeSplittingEmptyFile, toSearchCodeSplittingEmptyFile, {
+ flags: flags.concat('--outdir=.', '--bundle', '--splitting', '--format=esm'),
+ entryPoints: ['entry1.ts', 'entry2.ts'],
+ crlf,
+ checkFirstChunk: true,
+ })
)
}
}
diff --git a/version.txt b/version.txt
index 610e28725be..d21d277be51 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-0.23.1
+0.25.0